Skip to content

Commit e50f0c6

Browse files
authored
Prepare version 0.2.5
Co-authored-by: Serge Smertin <[email protected]>
1 parent f2929df commit e50f0c6

File tree

11 files changed

+45
-91
lines changed

11 files changed

+45
-91
lines changed

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ terraform {
4444
required_providers {
4545
databricks = {
4646
source = "databrickslabs/databricks"
47-
version = ... # replace dots with latest version
47+
version = "0.2.5"
4848
}
4949
}
5050
}

access/acceptance/secret_acl_test.go

Lines changed: 2 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import (
1010

1111
"github.com/databrickslabs/databricks-terraform/common"
1212
"github.com/databrickslabs/databricks-terraform/internal/acceptance"
13+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
1314
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
1415
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
1516
"github.com/stretchr/testify/assert"
@@ -22,12 +23,7 @@ func TestAccSecretAclResource(t *testing.T) {
2223
}
2324
//var secretScope Secre
2425
var secretACL ACLItem
25-
// generate a random name for each tokenInfo test run, to avoid
26-
// collisions from multiple concurrent tests.
27-
// the acctest package includes many helpers such as RandStringFromCharSet
28-
// See https://godoc.org/github.com/hashicorp/terraform-plugin-sdk/helper/acctest
29-
//scope := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
30-
scope := "terraform_acc_test_acl"
26+
scope := fmt.Sprintf("tf-scope-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
3127
principal := "users"
3228
permission := "READ"
3329

access/acceptance/secret_scope_test.go

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,7 @@ import (
1010

1111
"github.com/databrickslabs/databricks-terraform/common"
1212
"github.com/databrickslabs/databricks-terraform/internal/acceptance"
13+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/acctest"
1314
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
1415
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
1516
"github.com/stretchr/testify/assert"
@@ -22,7 +23,7 @@ func TestAccSecretScopeResource(t *testing.T) {
2223
}
2324
var secretScope SecretScope
2425

25-
scope := "terraform_acc_test_scope"
26+
scope := fmt.Sprintf("tf-%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
2627

2728
acceptance.AccTest(t, resource.TestCase{
2829
CheckDestroy: testSecretScopeResourceDestroy,

compute/acceptance/cluster_policy_test.go

Lines changed: 0 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -40,30 +40,6 @@ func TestAccClusterPolicyResourceFullLifecycle(t *testing.T) {
4040
Check: resource.TestCheckResourceAttr("databricks_cluster_policy.external_metastore",
4141
"name", fmt.Sprintf("Terraform policy %s", randomName+": UPDATED")),
4242
},
43-
{
44-
Config: testExternalMetastore(randomName + ": UPDATED"),
45-
Destroy: true,
46-
Check: acceptance.ResourceCheck("databricks_cluster_policy.external_metastore",
47-
func(client *common.DatabricksClient, id string) error {
48-
resp, err := NewClusterPoliciesAPI(client).Get(id)
49-
if err == nil {
50-
return fmt.Errorf("Resource must have been deleted but: %v", resp)
51-
}
52-
return nil
53-
}),
54-
},
55-
{
56-
// and create it again
57-
Config: testExternalMetastore(randomName + ": UPDATED"),
58-
Check: acceptance.ResourceCheck("databricks_cluster_policy.external_metastore",
59-
func(client *common.DatabricksClient, id string) error {
60-
_, err := NewClusterPoliciesAPI(client).Get(id)
61-
if err != nil {
62-
return err
63-
}
64-
return nil
65-
}),
66-
},
6743
},
6844
})
6945
}

compute/acceptance/cluster_test.go

Lines changed: 7 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -299,25 +299,18 @@ func TestAwsAccClusterResource_CreateClusterViaInstancePool(t *testing.T) {
299299
}
300300

301301
func TestAzureAccClusterResource_CreateClusterViaInstancePool(t *testing.T) {
302-
randomInstancePoolName := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
303-
randomInstancePoolInterpolation := fmt.Sprintf("databricks_instance_pool.%s.id", randomInstancePoolName)
304-
randomClusterSuffix := acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum)
305-
randomClusterName := fmt.Sprintf("cluster-%s", randomClusterSuffix)
306-
randomClusterId := fmt.Sprintf("databricks_cluster.%s", randomClusterName)
307-
defaultAzureInstancePoolClusterTest :=
308-
newInstancePoolHCLBuilder(randomInstancePoolName).
309-
withCloudEnv().
310-
build() +
311-
newClusterHCLBuilder(randomClusterName).
312-
withInstancePool(randomInstancePoolInterpolation).
313-
build()
314-
302+
randomInstancePoolName := fmt.Sprintf("pool_%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
303+
randomClusterName := fmt.Sprintf("cluster_%s", acctest.RandStringFromCharSet(10, acctest.CharSetAlphaNum))
304+
defaultAzureInstancePoolClusterTest := newInstancePoolHCLBuilder(randomInstancePoolName).withCloudEnv().build() +
305+
newClusterHCLBuilder(randomClusterName).withInstancePool(
306+
fmt.Sprintf("databricks_instance_pool.%s.id", randomInstancePoolName)).build()
315307
acceptance.AccTest(t, resource.TestCase{
316308
Steps: []resource.TestStep{
317309
{
318310
Config: defaultAzureInstancePoolClusterTest,
319311
Check: resource.ComposeTestCheckFunc(
320-
testClusterCheckAndTerminateForFutureTests(randomClusterId, t),
312+
testClusterCheckAndTerminateForFutureTests(
313+
fmt.Sprintf("databricks_cluster.%s", randomClusterName), t),
321314
),
322315
},
323316
},

docs/changelog.md

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,15 @@
11
# Version changelog
22

3+
## 0.2.5
4+
5+
* Added support for [local disk encryption](https://github.com/databrickslabs/terraform-provider-databricks/pull/313)
6+
* Added more reliable [indication about Azure environment](https://github.com/databrickslabs/terraform-provider-databricks/pull/312) and fixed azure auth issue for Terraform 0.13
7+
* Updated [databricks_aws_crossaccount_policy](https://github.com/databrickslabs/terraform-provider-databricks/pull/311) to latest rules
8+
* Fixed missing importers for [databricks_scim_*](https://github.com/databrickslabs/terraform-provider-databricks/pull/290) resources
9+
* Updated [Terraform Plugin SDK](https://github.com/databrickslabs/terraform-provider-databricks/pull/279) to latest version along with transitive dependencies.
10+
* Added support disclaimers
11+
* Increased code coverage to 65%
12+
313
## 0.2.4
414

515
* Added [Azure CLI authentication](https://github.com/databrickslabs/terraform-provider-databricks/blob/master/docs/index.md#authenticating-with-azure-cli) to bridge the gap of local development workflows and let more people use the provider.

docs/data-sources/aws_crossaccount_policy.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
11
# databricks_aws_crossaccount_policy Data Source
22

3-
This data source constructs necessary AWS cross-account policy for you, which is based on [official documentation](https://docs.databricks.com/administration-guide/account-settings/aws-accounts.html).
3+
-> **Note** This resource has an evolving API, which may change in future versions of the provider. Please always consult the [latest documentation](https://docs.databricks.com/administration-guide/account-api/iam-role.html#language-Your%C2%A0VPC,%C2%A0default) in case of any questions.
4+
5+
This data source constructs the necessary AWS cross-account policy for you, based on the [official documentation](https://docs.databricks.com/administration-guide/account-api/iam-role.html#language-Your%C2%A0VPC,%C2%A0default).
46

57
## Example Usage
68

internal/sanity/utils_test.go

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -135,6 +135,12 @@ func TestAccMissingResourcesInWorkspace(t *testing.T) {
135135
return err
136136
},
137137
},
138+
{
139+
Name: "Cluster Policies Delete",
140+
ReadFunc: func() error {
141+
return compute.NewClusterPoliciesAPI(client).Delete(randStringID)
142+
},
143+
},
138144
{
139145
Name: "Jobs",
140146
ReadFunc: func() error {

scripts/run.sh

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -124,6 +124,16 @@ function go_test {
124124
if [[ $@ == *"--tee"* ]]; then
125125
go_test $2 2>&1 | tee out.log
126126
echo "✓ To output of existing tests: less $PWD/out.log"
127+
128+
FAILURES=$(grep "\-\-\- FAIL" out.log | sed 's/--- FAIL: /\* \[ \]/g' | sort)
129+
PASSES=$(grep PASS out.log | grep Test | sort | sed 's/PASS/ \* \[x\]/')
130+
131+
cat <<-EOF > test-report.log
132+
$1
133+
---
134+
${FAILURES}
135+
${PASSES}
136+
EOF
127137
else
128138
go_test $2
129139
fi

storage/acceptance/adls_gen2_test.go

Lines changed: 0 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@ package acceptance
22

33
import (
44
"os"
5-
"regexp"
65
"testing"
76

87
"github.com/databrickslabs/databricks-terraform/common"
@@ -45,42 +44,3 @@ func TestAzureAccAdlsGen2Mount_correctly_mounts(t *testing.T) {
4544
},
4645
})
4746
}
48-
49-
func TestAzureAccAdlsGen2Mount_capture_error(t *testing.T) {
50-
if _, ok := os.LookupEnv("CLOUD_ENV"); !ok {
51-
t.Skip("Acceptance tests skipped unless env 'CLOUD_ENV' is set")
52-
}
53-
client := common.CommonEnvironmentClient()
54-
if !client.AzureAuth.IsClientSecretSet() {
55-
t.Skip("Test is meant only for SP Azure")
56-
}
57-
acceptance.AccTest(t, resource.TestCase{
58-
Steps: []resource.TestStep{
59-
{
60-
Config: qa.EnvironmentTemplate(t, `
61-
resource "databricks_secret_scope" "terraform" {
62-
name = "terraform-{var.RANDOM}"
63-
initial_manage_principal = "users"
64-
}
65-
resource "databricks_secret" "client_secret" {
66-
key = "datalake_sp_secret"
67-
string_value = "{env.ARM_CLIENT_SECRET}"
68-
scope = databricks_secret_scope.terraform.name
69-
}
70-
resource "databricks_azure_adls_gen2_mount" "mount" {
71-
storage_account_name = "{env.TEST_STORAGE_V2_ACCOUNT}"
72-
container_name = "{env.TEST_STORAGE_V2_ABFSS}"
73-
mount_name = "localdir{var.RANDOM}"
74-
tenant_id = "{env.ARM_TENANT_ID}"
75-
client_id = "{env.ARM_CLIENT_ID}"
76-
client_secret_scope = databricks_secret_scope.terraform.name
77-
client_secret_key = "SECRET_KEY_WRONG_ON_PURPOSE"
78-
initialize_file_system = true
79-
}`),
80-
ExpectNonEmptyPlan: true,
81-
ExpectError: regexp.MustCompile("Secret does not exist with scope"),
82-
Destroy: false,
83-
},
84-
},
85-
})
86-
}

0 commit comments

Comments
 (0)