diff --git a/.kitchen.yml b/.kitchen.yml
deleted file mode 100644
index a9626ee1..00000000
--- a/.kitchen.yml
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2022 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
----
-provisioner:
-  name: terraform
-
-platforms:
-  - name: local
-
-verifier:
-  name: terraform
-  systems:
-    - name: system
-      backend: gcp
-
-suites:
-  - name: full
-    driver:
-      name: terraform
-      command_timeout: 1800
-      root_module_directory: test/fixtures/full
-      # setting version verification to false since it requires TF to be less than v1.1
-      verify_version: false
diff --git a/Makefile b/Makefile
index 705770a0..75f060e5 100644
--- a/Makefile
+++ b/Makefile
@@ -64,7 +64,7 @@ docker_test_integration:
 		-e SERVICE_ACCOUNT_JSON \
 		-v $(CURDIR):/workspace \
 		$(REGISTRY_URL)/${DOCKER_IMAGE_DEVELOPER_TOOLS}:${DOCKER_TAG_VERSION_DEVELOPER_TOOLS} \
-		/usr/local/bin/test_integration.sh
+		cft test run all
 
 # Execute lint tests within the docker container
 .PHONY: docker_test_lint
diff --git a/build/int.cloudbuild.yaml b/build/int.cloudbuild.yaml
index f351305d..e9780177 100644
--- a/build/int.cloudbuild.yaml
+++ b/build/int.cloudbuild.yaml
@@ -24,18 +24,9 @@ steps:
   - 'TF_VAR_org_id=$_ORG_ID'
   - 'TF_VAR_folder_id=$_FOLDER_ID'
   - 'TF_VAR_billing_account=$_BILLING_ACCOUNT'
-- id: create
+- id: multiple-tables
   name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
-  args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do create']
-- id: converge
-  name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
-  args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do converge']
-- id: verify
-  name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
-  args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do verify']
-- id: destroy
-  name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
-  args: ['/bin/bash', '-c', 'source /usr/local/bin/task_helper_functions.sh && kitchen_do destroy']
+  args: ['/bin/bash', '-c', 'cft test run TestMultipleTables --verbose']
 - id: create-dwh
   name: 'gcr.io/cloud-foundation-cicd/$_DOCKER_IMAGE_DEVELOPER_TOOLS:$_DOCKER_TAG_VERSION_DEVELOPER_TOOLS'
   args: ['/bin/bash', '-c', 'cft test run TestDataWarehouse --stage init --verbose']
diff --git a/test/fixtures/full/main.tf b/test/fixtures/multiple_tables/main.tf
similarity index 92%
rename from test/fixtures/full/main.tf
rename to test/fixtures/multiple_tables/main.tf
index 7e0339d3..1a6b233f 100644
--- a/test/fixtures/full/main.tf
+++ b/test/fixtures/multiple_tables/main.tf
@@ -19,5 +19,5 @@ module "example" {
   default_table_expiration_ms = var.default_table_expiration_ms
   project_id                   = var.project_id
   dataset_labels               = var.dataset_labels
-  kms_key                      = jsondecode(var.kms_keys)["foo"]
+  kms_key                      = var.kms_keys.foo
 }
diff --git a/test/fixtures/full/outputs.tf b/test/fixtures/multiple_tables/outputs.tf
similarity index 100%
rename from test/fixtures/full/outputs.tf
rename to test/fixtures/multiple_tables/outputs.tf
diff --git a/test/fixtures/full/sample_bq_schema.json b/test/fixtures/multiple_tables/sample_bq_schema.json
similarity index 100%
rename from test/fixtures/full/sample_bq_schema.json
rename to test/fixtures/multiple_tables/sample_bq_schema.json
diff --git a/test/fixtures/full/terraform.tfvars b/test/fixtures/multiple_tables/terraform.tfvars
similarity index 100%
rename from test/fixtures/full/terraform.tfvars
rename to test/fixtures/multiple_tables/terraform.tfvars
diff --git a/test/fixtures/full/variables.tf b/test/fixtures/multiple_tables/variables.tf
similarity index 97%
rename from test/fixtures/full/variables.tf
rename to test/fixtures/multiple_tables/variables.tf
index 499b7314..7adfb8bb 100644
--- a/test/fixtures/full/variables.tf
+++ b/test/fixtures/multiple_tables/variables.tf
@@ -24,7 +24,7 @@ variable "project_id" {
 
 variable "kms_keys" {
   description = "The KMS key module output"
-  default     = null
+  type        = map(string)
 }
 
 variable "dataset_labels" {
diff --git a/test/integration/full/controls/big_query.rb b/test/integration/full/controls/big_query.rb
deleted file mode 100644
index a1d5c385..00000000
--- a/test/integration/full/controls/big_query.rb
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      https://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# Attributes can be used to create tests with as the mode becomes more complex
-project_id = attribute('bigquery_dataset')[:project]
-dataset_name = attribute('bigquery_dataset')[:friendly_name]
-tables = attribute('bigquery_tables')
-external_tables = attribute('bigquery_external_tables')
-
-describe google_bigquery_dataset(project: "#{project_id}", name: "#{dataset_name}") do
-  it { should exist }
-
-  its('friendly_name') { should eq "#{dataset_name}" }
-  its('description') { should eq 'some description' }
-  its('location') { should eq 'US' }
-  its('default_table_expiration_ms') { should cmp '3600000' }
-  its('default_encryption_configuration.kms_key_name') { should cmp "projects/#{project_id}/locations/us/keyRings/ci-bigquery-keyring/cryptoKeys/foo" }
-end
-
-describe google_bigquery_table(project: "#{project_id}", dataset: "#{dataset_name}", name: "#{tables[:foo][:friendly_name]}") do
-  it { should exist }
-  its('friendly_name') { should eq "#{tables[:foo][:friendly_name]}" }
-  its('time_partitioning.type') { should eq 'DAY' }
-  its('clustering') { should_not be nil }
-end
-
-describe google_bigquery_table(project: "#{project_id}", dataset: "#{dataset_name}", name: "#{tables[:bar][:friendly_name]}") do
-  it { should exist }
-  its('friendly_name') { should eq "#{tables[:bar][:friendly_name]}" }
-  its('time_partitioning.type') { should be nil }
-  its('clustering') { should be nil }
-end
-
-describe google_bigquery_table(project: "#{project_id}", dataset: "#{dataset_name}", name: "#{external_tables[:csv_example][:friendly_name]}") do
-  it { should exist }
-  its('friendly_name') { should eq "#{external_tables[:csv_example][:friendly_name]}" }
-  its('time_partitioning.type') { should be nil }
-  its('clustering') { should be nil }
-  its('type') { should eq "EXTERNAL" }
-  its('external_data_configuration.autodetect') { should be true }
-  its('external_data_configuration.compression') { should eq "NONE" }
-  its('external_data_configuration.ignore_unknown_values') { should be true }
-  its('external_data_configuration.max_bad_records') { should be nil }
-  its('external_data_configuration.source_format') { should eq "CSV" }
-  its('external_data_configuration.source_uris') { should eq ["gs://ci-bq-external-data/bigquery-external-table-test.csv"] }
-
-  its('external_data_configuration.csv_options.quote') { should eq "\"" }
-  its('external_data_configuration.csv_options.allow_jagged_rows') { should be nil }
-  its('external_data_configuration.csv_options.allow_quoted_newlines') { should be true }
-  its('external_data_configuration.csv_options.encoding') { should eq "UTF-8" }
-  its('external_data_configuration.csv_options.field_delimiter') { should eq "," }
-  its('external_data_configuration.csv_options.skip_leading_rows') { should eq "1" }
-end
-
-describe google_bigquery_table(project: "#{project_id}", dataset: "#{dataset_name}", name: "#{external_tables[:hive_example][:friendly_name]}") do
-  it { should exist }
-  its('friendly_name') { should eq "#{external_tables[:hive_example][:friendly_name]}" }
-  its('time_partitioning.type') { should be nil }
-  its('clustering') { should be nil }
-  its('type') { should eq "EXTERNAL" }
-  its('external_data_configuration.autodetect') { should be true }
-  its('external_data_configuration.compression') { should eq "NONE" }
-  its('external_data_configuration.ignore_unknown_values') { should be true }
-  its('external_data_configuration.max_bad_records') { should be nil }
-  its('external_data_configuration.source_format') { should eq "CSV" }
-  its('external_data_configuration.source_uris') { should eq ["gs://ci-bq-external-data/hive_partition_example/year=2012/foo.csv","gs://ci-bq-external-data/hive_partition_example/year=2013/bar.csv"] }
-end
-
-describe google_bigquery_table(project: "#{project_id}", dataset: "#{dataset_name}", name: "#{external_tables[:google_sheets_example][:friendly_name]}") do
-  it { should exist }
-  its('type') { should eq "EXTERNAL" }
-  its('friendly_name') { should eq "#{external_tables[:google_sheets_example][:friendly_name]}" }
-  its('time_partitioning.type') { should be nil }
-  its('clustering') { should be nil }
-  its('external_data_configuration.autodetect') { should be true }
-  its('external_data_configuration.compression') { should eq "NONE" }
-  its('external_data_configuration.ignore_unknown_values') { should be true }
-  its('external_data_configuration.max_bad_records') { should be nil }
-  its('external_data_configuration.source_format') { should eq "GOOGLE_SHEETS" }
-  its('external_data_configuration.source_uris') { should eq ["https://docs.google.com/spreadsheets/d/15v4N2UG6bv1RmX__wru4Ei_mYMdVcM1MwRRLxFKc55s"] }
-  its('external_data_configuration.google_sheets_options.skip_leading_rows') { should eq "1" }
-end
diff --git a/test/integration/full/inspec.yml b/test/integration/full/inspec.yml
deleted file mode 100644
index 471b9dd1..00000000
--- a/test/integration/full/inspec.yml
+++ /dev/null
@@ -1,31 +0,0 @@
-# Copyright 2023 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-name: local
-depends:
-  - name: inspec-gcp
-    git: https://github.com/inspec/inspec-gcp.git
-    tag: v1.10.0
-supports:
-  - platform: gcp
-attributes:
-  - name: bigquery_dataset
-    required: true
-    type: hash
-  - name: bigquery_tables
-    required: true
-    type: hash
-  - name: bigquery_external_tables
-    required: true
-    type: hash
diff --git a/test/integration/multiple_tables/multiple_tables_test.go b/test/integration/multiple_tables/multiple_tables_test.go
new file mode 100644
index 00000000..0e209f2a
--- /dev/null
+++ b/test/integration/multiple_tables/multiple_tables_test.go
@@ -0,0 +1,133 @@
+// Copyright 2025 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package multiple_tables
+
+import (
+	"fmt"
+	"testing"
+
+	"github.com/GoogleCloudPlatform/cloud-foundation-toolkit/infra/blueprint-test/pkg/tft"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestMultipleTables(t *testing.T) {
+	dwh := tft.NewTFBlueprintTest(t)
+
+	dwh.DefineVerify(func(assert *assert.Assertions) {
+		// This call currently fails with "plan after apply should have no diffs"; this needs investigation.
+		dwh.DefaultVerify(assert)
+
+		projectID := dwh.GetTFSetupStringOutput("project_id")
+		tables := dwh.GetJsonOutput("bigquery_tables")
+		externalTables := dwh.GetJsonOutput("bigquery_external_tables")
+
+		{ // Dataset test
+			dataset := dwh.GetJsonOutput("bigquery_dataset")
+			assert.Equal("foo", dataset.Get("friendly_name").String(), "dataset's friendly_name should be foo")
+			assert.Equal("some description", dataset.Get("description").String(), "dataset foo's description should be 'some description'")
+			assert.Equal("US", dataset.Get("location").String(), "dataset foo's location should be US")
+			assert.Equal(int64(3600000), dataset.Get("default_table_expiration_ms").Int(), "dataset foo's default_table_expiration_ms should be 3600000")
+			assert.Equal(fmt.Sprintf("projects/%s/locations/us/keyRings/ci-bigquery-keyring/cryptoKeys/foo", projectID), dataset.Get("default_encryption_configuration.0.kms_key_name").String(), "dataset foo's default_encryption_configuration.0.kms_key_name should be the expected key")
+		}
+
+		{ // "foo" table test
+			fooTable := tables.Get("foo")
+			assert.Equal("foo", fooTable.Get("friendly_name").String(), "table's friendly_name should be foo")
+			assert.Equal("DAY", fooTable.Get("time_partitioning.0.type").String(), "foo table's time_partitioning.type should be DAY")
+
+			assert.Greater(len(fooTable.Get("clustering").Array()), 0, "foo table's clustering should be nonempty")
+		}
+
+		{ // "bar" table test
+			barTable := tables.Get("bar")
+			assert.True(barTable.Exists(), "bar table should exist in terraform outputs")
+			assert.Equal("bar", barTable.Get("friendly_name").String(), "table's friendly_name should be bar")
+			assert.False(barTable.Get("time_partitioning.0.type").Exists(), "bar table's time_partitioning.type should be null")
+			assert.Len(barTable.Get("clustering").Array(), 0, "bar table's clustering should be empty")
+		}
+
+		{ // CSV test
+			csvTable := externalTables.Get("csv_example")
+			assert.True(csvTable.Exists(), "csv_example table should exist in terraform outputs")
+			assert.Equal("csv_example", csvTable.Get("friendly_name").String(), "table's friendly_name should be csv_example")
+			assert.False(csvTable.Get("time_partitioning.0.type").Exists(), "csv_example table's time_partitioning.type should be null")
+			assert.Len(csvTable.Get("clustering").Array(), 0, "csv_example table's clustering should be empty")
+			assert.Equal("EXTERNAL", csvTable.Get("type").String(), "csv_example table's type should be EXTERNAL")
+
+			csvDataConfig := csvTable.Get("external_data_configuration.0")
+			assert.True(csvDataConfig.Exists(), "csv_example table should contain external_data_configuration")
+			assert.True(csvDataConfig.Get("autodetect").Bool(), "csv_example.external_data_configuration.autodetect should be true")
+			assert.Equal("NONE", csvDataConfig.Get("compression").String(), "csv_example.external_data_configuration.compression should be NONE")
+			assert.True(csvDataConfig.Get("ignore_unknown_values").Bool(), "csv_example.external_data_configuration.ignore_unknown_values should be true")
+			assert.Equal(int64(0), csvDataConfig.Get("max_bad_records").Int(), "csv_example.external_data_configuration.max_bad_records should be 0")
+			assert.Equal("CSV", csvDataConfig.Get("source_format").String(), "csv_example.external_data_configuration.source_format should be CSV")
+			assert.Len(csvDataConfig.Get("source_uris").Array(), 1, "csv_example.external_data_configuration.source_uris should have 1 element")
+			assert.Equal("gs://ci-bq-external-data/bigquery-external-table-test.csv", csvDataConfig.Get("source_uris.0").String(), "csv_example.external_data_configuration.source_uris[0] should have the expected URI")
+
+			csvOptions := csvDataConfig.Get("csv_options.0")
+			assert.True(csvOptions.Exists(), "csv_example table should contain external_data_configuration.csv_options")
+			assert.Equal(`"`, csvOptions.Get("quote").String(), `csv_example.external_data_configuration.csv_options.quote should be " (a quote character)`)
+			assert.False(csvOptions.Get("allow_jagged_rows").Bool(), "csv_example.external_data_configuration.csv_options.allow_jagged_rows should be false")
+			assert.True(csvOptions.Get("allow_quoted_newlines").Bool(), "csv_example.external_data_configuration.csv_options.allow_quoted_newlines should be true")
+			assert.Equal("UTF-8", csvOptions.Get("encoding").String(), "csv_example.external_data_configuration.csv_options.encoding should be UTF-8")
+			assert.Equal(",", csvOptions.Get("field_delimiter").String(), "csv_example.external_data_configuration.csv_options.field_delimiter should be ,")
+			assert.Equal(int64(1), csvOptions.Get("skip_leading_rows").Int(), "csv_example.external_data_configuration.csv_options.skip_leading_rows should be 1")
+		}
+
+		{ // Hive test
+			hiveTable := externalTables.Get("hive_example")
+			assert.True(hiveTable.Exists(), "hive_example table should exist in terraform outputs")
+			assert.Equal("hive_example", hiveTable.Get("friendly_name").String(), "table's friendly_name should be hive_example")
+			assert.False(hiveTable.Get("time_partitioning.0.type").Exists(), "hive_example table's time_partitioning.type should be null")
+			assert.Len(hiveTable.Get("clustering").Array(), 0, "hive_example table's clustering should be empty")
+			assert.Equal("EXTERNAL", hiveTable.Get("type").String(), "hive_example table's type should be EXTERNAL")
+
+			hiveDataConfig := hiveTable.Get("external_data_configuration.0")
+			assert.True(hiveDataConfig.Exists(), "hive_example table should contain external_data_configuration")
+			assert.True(hiveDataConfig.Get("autodetect").Bool(), "hive_example.external_data_configuration.autodetect should be true")
+			assert.Equal("NONE", hiveDataConfig.Get("compression").String(), "hive_example.external_data_configuration.compression should be NONE")
+			assert.True(hiveDataConfig.Get("ignore_unknown_values").Bool(), "hive_example.external_data_configuration.ignore_unknown_values should be true")
+			assert.Equal(int64(0), hiveDataConfig.Get("max_bad_records").Int(), "hive_example.external_data_configuration.max_bad_records should be 0")
+			assert.Equal("CSV", hiveDataConfig.Get("source_format").String(), "hive_example.external_data_configuration.source_format should be CSV")
+
+			assert.Len(hiveDataConfig.Get("source_uris").Array(), 2, "hive_example.external_data_configuration.source_uris should have 2 elements")
+			assert.Equal("gs://ci-bq-external-data/hive_partition_example/year=2012/foo.csv", hiveDataConfig.Get("source_uris.0").String(), "hive_example.external_data_configuration.source_uris[0] should have the expected URI")
+			assert.Equal("gs://ci-bq-external-data/hive_partition_example/year=2013/bar.csv", hiveDataConfig.Get("source_uris.1").String(), "hive_example.external_data_configuration.source_uris[1] should have the expected URI")
+		}
+
+		{ // Google Sheets test
+			sheetsTable := externalTables.Get("google_sheets_example")
+			assert.True(sheetsTable.Exists(), "google_sheets_example table should exist in terraform outputs")
+			assert.Equal("google_sheets_example", sheetsTable.Get("friendly_name").String(), "table's friendly_name should be google_sheets_example")
+			assert.False(sheetsTable.Get("time_partitioning.0.type").Exists(), "google_sheets_example table's time_partitioning.type should be null")
+			assert.Len(sheetsTable.Get("clustering").Array(), 0, "google_sheets_example table's clustering should be empty")
+			assert.Equal("EXTERNAL", sheetsTable.Get("type").String(), "google_sheets_example table's type should be EXTERNAL")
+
+			sheetsDataConfig := sheetsTable.Get("external_data_configuration.0")
+			assert.True(sheetsDataConfig.Exists(), "google_sheets_example table should contain external_data_configuration")
+			assert.True(sheetsDataConfig.Get("autodetect").Bool(), "google_sheets_example.external_data_configuration.autodetect should be true")
+			assert.Equal("NONE", sheetsDataConfig.Get("compression").String(), "google_sheets_example.external_data_configuration.compression should be NONE")
+			assert.True(sheetsDataConfig.Get("ignore_unknown_values").Bool(), "google_sheets_example.external_data_configuration.ignore_unknown_values should be true")
+			assert.Equal(int64(0), sheetsDataConfig.Get("max_bad_records").Int(), "google_sheets_example.external_data_configuration.max_bad_records should be 0")
+			assert.Equal("GOOGLE_SHEETS", sheetsDataConfig.Get("source_format").String(), "google_sheets_example.external_data_configuration.source_format should be GOOGLE_SHEETS")
+
+			assert.Len(sheetsDataConfig.Get("source_uris").Array(), 1, "google_sheets_example.external_data_configuration.source_uris should have 1 element")
+			assert.Equal("https://docs.google.com/spreadsheets/d/15v4N2UG6bv1RmX__wru4Ei_mYMdVcM1MwRRLxFKc55s", sheetsDataConfig.Get("source_uris.0").String(), "google_sheets_example.external_data_configuration.source_uris[0] should have the expected URI")
+
+			assert.Equal(int64(1), sheetsDataConfig.Get("google_sheets_options.0.skip_leading_rows").Int(), "google_sheets_example.external_data_configuration.google_sheets_options.0.skip_leading_rows should be 1")
+		}
+	})
+	dwh.Test()
+}
diff --git a/test/setup/main.tf b/test/setup/main.tf
index 2e0f2833..a05fb7d6 100644
--- a/test/setup/main.tf
+++ b/test/setup/main.tf
@@ -71,6 +71,7 @@ module "project" {
   folder_id               = var.folder_id
   billing_account         = var.billing_account
   default_service_account = "keep"
+  deletion_policy         = "DELETE"
   activate_apis           = tolist(toset(flatten(values(local.per_module_services))))
 }