
Commit 0908416

Fix documentation for databricks_storage_credential and databricks_external_location data sources (#3588)
Changes include:

* Documented the right structure returned by these data sources - the nested structures weren't mentioned there at all.
* Fixed examples for both data sources.
* Added an `id` attribute for easier reference to the data sources.
* Reformatted the rest of the doc examples.
1 parent 9648502 commit 0908416

9 files changed: +55 -43 lines changed
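
Net effect of the changes below: both data sources now expose a computed `id`, and their details are read from the nested info blocks rather than from top-level attributes. A minimal sketch of the corrected usage (the resource labels are placeholders, not taken from the commit):

```hcl
data "databricks_external_location" "this" {
  name = "abc"
}

output "location_url" {
  # details now live in the nested, single-element external_location_info list
  value = data.databricks_external_location.this.external_location_info[0].url
}
```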

catalog/data_external_location.go

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,7 @@ import (
 
 func DataSourceExternalLocation() common.Resource {
 	type ExternalLocationByID struct {
+		Id               string                        `json:"id,omitempty" tf:"computed"`
 		Name             string                        `json:"name"`
 		ExternalLocation *catalog.ExternalLocationInfo `json:"external_location_info,omitempty" tf:"computed" `
 	}
@@ -19,6 +20,7 @@ func DataSourceExternalLocation() common.Resource {
 			return err
 		}
 		data.ExternalLocation = location
+		data.Id = location.Name
 		return nil
 	})
 }
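
Because the lookup copies `location.Name` into `data.Id`, the exported `id` simply mirrors the `name` argument, which is what the test below asserts (`"id": "abc"`). A hypothetical illustration:

```hcl
data "databricks_external_location" "this" {
  name = "abc" # placeholder name
}

output "location_id" {
  # id mirrors name, since the data source sets data.Id = location.Name
  value = data.databricks_external_location.this.id # => "abc"
}
```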

catalog/data_external_location_test.go

Lines changed: 3 additions & 2 deletions
@@ -31,8 +31,9 @@ func TestExternalLocationDataVerify(t *testing.T) {
 		name = "abc"
 		`,
 	}.ApplyAndExpectData(t, map[string]any{
-		"external_location_info.0.owner": "admin",
-		"external_location_info.0.url":   "s3://test",
+		"id":                             "abc",
+		"external_location_info.0.owner": "admin",
+		"external_location_info.0.url":   "s3://test",
 		"external_location_info.0.credential_name": "test",
 		"external_location_info.0.read_only":       true,
 	})

catalog/data_storage_credential.go

Lines changed: 2 additions & 0 deletions
@@ -10,6 +10,7 @@ import (
 
 func DataSourceStorageCredential() common.Resource {
	type AccountMetastoreByID struct {
+		Id                string                         `json:"id,omitempty" tf:"computed"`
 		Name              string                         `json:"name"`
 		StorageCredential *catalog.StorageCredentialInfo `json:"storage_credential_info,omitempty" tf:"computed" `
 	}
@@ -19,6 +20,7 @@ func DataSourceStorageCredential() common.Resource {
 			return err
 		}
 		data.StorageCredential = credential
+		data.Id = credential.Id
 		return nil
 	})
 }
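
Unlike the external location data source, this one fills `id` from `credential.Id`, the server-assigned credential identifier rather than its name; the test below pairs `name = "abc"` with `id = "1234"`. A hypothetical sketch:

```hcl
data "databricks_storage_credential" "this" {
  name = "abc" # placeholder name
}

output "credential_id" {
  # id is the server-assigned credential ID (e.g. "1234" in the test), not the name
  value = data.databricks_storage_credential.this.id
}
```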

catalog/data_storage_credential_test.go

Lines changed: 3 additions & 1 deletion
@@ -15,6 +15,7 @@ func TestStorageCredentialDataVerify(t *testing.T) {
 	e := w.GetMockStorageCredentialsAPI().EXPECT()
 	e.GetByName(mock.Anything, "abc").Return(
 		&catalog.StorageCredentialInfo{
+			Id:    "1234",
 			Name:  "abc",
 			Owner: "admin",
 			AwsIamRole: &catalog.AwsIamRoleResponse{
@@ -37,7 +38,8 @@ func TestStorageCredentialDataVerify(t *testing.T) {
 		name = "abc"
 		`,
 	}.ApplyAndExpectData(t, map[string]any{
-		"storage_credential_info.0.owner": "admin",
+		"id":                              "1234",
+		"storage_credential_info.0.owner": "admin",
 		"storage_credential_info.0.aws_iam_role.0.role_arn":                      "test",
 		"storage_credential_info.0.azure_managed_identity.0.access_connector_id": "test",
 		"storage_credential_info.0.databricks_gcp_service_account.0.email":       "test",

docs/data-sources/external_location.md

Lines changed: 19 additions & 10 deletions
@@ -17,24 +17,33 @@ data "databricks_external_location" "this" {
 }
 
 output "created_by" {
-  value     = data.databricks_external_location.this.created_by
-  sensitive = false
+  value = data.databricks_external_location.this.external_location_info[0].created_by
 }
 ```
 
 ## Argument Reference
 
-* `name` - (Required) The name of the storage credential
+* `name` - (Required) The name of the external location
 
 ## Attribute Reference
 
-* `url` - Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
-* `credential_name` - Name of the [databricks_storage_credential](storage_credential.md) to use with this external location.
-* `owner` - Username/groupname/sp application_id of the external location owner.
-* `comment` - User-supplied comment.
-* `read_only` - Indicates whether the external location is read-only.
-* `access_point` - The ARN of the s3 access point to use with the external location (AWS).
-* `encryption_details` - The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
+This data source exports the following attributes:
+
+* `id` - external location ID - same as name.
+* `external_location_info` - array of objects with information about external location:
+  * `url` - Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+  * `credential_name` - Name of the [databricks_storage_credential](storage_credential.md) to use with this external location.
+  * `credential_id` - Unique ID of storage credential.
+  * `metastore_id` - Unique identifier of the parent Metastore.
+  * `owner` - Username/groupname/sp application_id of the external location owner.
+  * `comment` - User-supplied comment.
+  * `read_only` - Indicates whether the external location is read-only.
+  * `created_at` - Time at which this catalog was created, in epoch milliseconds.
+  * `created_by` - Username of catalog creator.
+  * `updated_at` - Time at which this catalog was last modified, in epoch milliseconds.
+  * `updated_by` - Username of user who last modified catalog.
+  * `access_point` - The ARN of the s3 access point to use with the external location (AWS).
+  * `encryption_details` - The options for Server-Side Encryption to be used by each Databricks s3 client when connecting to S3 cloud storage (AWS).
 
 ## Related Resources
 
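
One practical consequence of the corrected structure: `credential_name` sits inside `external_location_info`, so the credential backing a location can be chained into the storage credential data source. A hedged sketch (the names are placeholders):

```hcl
data "databricks_external_location" "this" {
  name = "abc" # placeholder
}

# look up the storage credential that backs this external location
data "databricks_storage_credential" "backing" {
  name = data.databricks_external_location.this.external_location_info[0].credential_name
}
```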

docs/data-sources/mlflow_experiment.md

Lines changed: 1 addition & 1 deletion
@@ -35,4 +35,4 @@ This data source exports the following attributes:
 * `last_update_time` - Last update time in unix time stamp.
 * `lifecycle_stage` - Current life cycle stage of the experiment: `active` or `deleted`.
 * `name` - Path to experiment.
-* `tags` - Additional metadata key-value pairs.
\ No newline at end of file
+* `tags` - Additional metadata key-value pairs.

docs/data-sources/storage_credential.md

Lines changed: 22 additions & 26 deletions
@@ -17,8 +17,7 @@ data "databricks_storage_credential" "this" {
 }
 
 output "created_by" {
-  value     = data.databricks_storage_credential.this.created_by
-  sensitive = false
+  value = data.databricks_storage_credential.this.storage_credential_info[0].created_by
 }
 ```
 
@@ -30,30 +29,27 @@ output "created_by" {
 
 This data source exports the following attributes:
 
-* `metastore_id` - Unique identifier of the parent Metastore.
-* `owner` - Username/groupname/sp application_id of the storage credential owner.
-* `read_only` - Indicates whether the storage credential is only usable for read operations.
-
-`aws_iam_role` credential details for AWS:
-
-* `role_arn` - The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`
-* `external_id` (output only) - The external ID used in role assumption to prevent confused deputy problem.
-* `unity_catalog_iam_arn` (output only) - The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity that is going to assume the AWS IAM role.
-
-`azure_managed_identity` managed identity credential details for Azure
-
-* `access_connector_id` - The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`.
-
-* `managed_identity_id` - The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`.
-
-`databricks_gcp_service_account` credential details for GCP:
-
-* `email` - The email of the GCP service account created, to be granted access to relevant buckets.
-
-`azure_service_principal` service principal credential details for Azure:
-
-* `directory_id` - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
-* `application_id` - The application ID of the application registration within the referenced AAD tenant
+* `id` - Unique ID of storage credential.
+* `storage_credential_info` - array of objects with information about storage credential.
+  * `metastore_id` - Unique identifier of the parent Metastore.
+  * `owner` - Username/groupname/sp application_id of the storage credential owner.
+  * `read_only` - Indicates whether the storage credential is only usable for read operations.
+  * `created_at` - Time at which this catalog was created, in epoch milliseconds.
+  * `created_by` - Username of catalog creator.
+  * `updated_at` - Time at which this catalog was last modified, in epoch milliseconds.
+  * `updated_by` - Username of user who last modified catalog.
+  * `aws_iam_role` credential details for AWS:
    * `role_arn` - The Amazon Resource Name (ARN) of the AWS IAM role for S3 data access, of the form `arn:aws:iam::1234567890:role/MyRole-AJJHDSKSDF`
+    * `external_id` (output only) - The external ID used in role assumption to prevent confused deputy problem.
+    * `unity_catalog_iam_arn` (output only) - The Amazon Resource Name (ARN) of the AWS IAM user managed by Databricks. This is the identity that is going to assume the AWS IAM role.
+  * `azure_managed_identity` managed identity credential details for Azure
+    * `access_connector_id` - The Resource ID of the Azure Databricks Access Connector resource, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.Databricks/accessConnectors/connector-name`.
+    * `managed_identity_id` - The Resource ID of the Azure User Assigned Managed Identity associated with Azure Databricks Access Connector, of the form `/subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/rg-name/providers/Microsoft.ManagedIdentity/userAssignedIdentities/user-managed-identity-name`.
+  * `azure_service_principal` service principal credential details for Azure:
+    * `directory_id` - The directory ID corresponding to the Azure Active Directory (AAD) tenant of the application
+    * `application_id` - The application ID of the application registration within the referenced AAD tenant
+  * `databricks_gcp_service_account` credential details for GCP:
+    * `email` - The email of the GCP service account created, to be granted access to relevant buckets.
 
 ## Related Resources
 
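
The cloud-specific blocks nest one level deeper, so reading them takes a second index, matching the `storage_credential_info.0.aws_iam_role.0.role_arn` path asserted in the test above. An illustrative sketch (the credential name is a placeholder):

```hcl
data "databricks_storage_credential" "this" {
  name = "abc" # placeholder
}

output "role_arn" {
  # nested blocks surface as single-element lists, hence the double [0]
  value = data.databricks_storage_credential.this.storage_credential_info[0].aws_iam_role[0].role_arn
}
```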

docs/resources/compliance_security_profile_setting.md

Lines changed: 2 additions & 2 deletions
@@ -15,7 +15,7 @@ turned off. This setting can NOT be disabled once it is enabled.
 ```hcl
 resource "databricks_compliance_security_profile_workspace_setting" "this" {
   compliance_security_profile_workspace {
-    is_enabled = true
+    is_enabled           = true
     compliance_standards = ["HIPAA", "FEDRAMP_MODERATE"]
   }
 }
@@ -34,4 +34,4 @@
 
 ```bash
 terraform import databricks_compliance_security_profile_workspace_setting.this global
-```
\ No newline at end of file
+```

docs/resources/enhanced_security_monitoring_setting.md

Lines changed: 1 addition & 1 deletion
@@ -33,4 +33,4 @@
 
 ```bash
 terraform import databricks_enhanced_security_monitoring_workspace_setting.this global
-```
\ No newline at end of file
+```
