@@ -9,15 +9,13 @@ The main idea behind this module is to deploy resources for Databricks Workspace
99Here we provide some examples of how to provision it with different options.
1010
1111### In example below, these features of given module would be covered:
12- 1 . Workspace admins assignment, custom Workspace group creation, group assignments, group entitlements
13- 2 . Clusters (i.e., for Unity Catalog and Shared Autoscaling)
14- 3 . Workspace IP Access list creation
15- 4 . ADLS Gen2 Mount
16- 5 . Create Secret Scope and assign permissions to custom groups
17- 6 . SQL Endpoint creation and configuration
18- 7 . Create Cluster policy
19- 8 . Create an Azure Key Vault-backed secret scope
20- 9 . Connect to already existing Unity Catalog Metastore
12+ 1 . Clusters (i.e., for Unity Catalog and Shared Autoscaling)
13+ 2 . Workspace IP Access list creation
14+ 3 . ADLS Gen2 Mount
15+ 4 . Create Secret Scope and assign permissions to custom groups
16+ 5 . SQL Endpoint creation and configuration
17+ 6 . Create Cluster policy
18+ 7 . Create an Azure Key Vault-backed secret scope
2119
2220``` hcl
2321# Prerequisite resources
@@ -56,25 +54,10 @@ module "databricks_runtime_premium" {
5654 sp_key_secret_name = "sp-key" # secret's name that stores Service Principal Secret Key
5755 tenant_id_secret_name = "infra-arm-tenant-id" # secret's name that stores tenant id value
5856
59- # 1.1 Workspace admins
60- workspace_admins = {
61- 62- service_principal = ["example-app-id"]
63- }
64-
65- # 1.2 Custom Workspace group with assignments.
66- # In addition, provides an ability to create group and entitlements.
67- iam = [{
68- group_name = "DEVELOPERS"
69- permissions = ["ADMIN"]
70- entitlements = [
71- "allow_instance_pool_create",
72- "allow_cluster_create",
73- "databricks_sql_access"
74- ]
75- }]
57+ # Cloud provider
58+ cloud_name = "cloud-name" # cloud provider (e.g., "aws", "azure")
7659
77- # 2 . Databricks clusters configuration, and assign permission to a custom group on clusters.
60+ # 1 . Databricks clusters configuration, and assign permission to a custom group on clusters.
7861 databricks_cluster_configs = [ {
7962 cluster_name = "Unity Catalog"
8063 data_security_mode = "USER_ISOLATION"
@@ -90,33 +73,33 @@ module "databricks_runtime_premium" {
9073 permissions = [{group_name = "DEVELOPERS", permission_level = "CAN_MANAGE"}]
9174 }]
9275
93- # 3 . Workspace could be accessed only from these IP Addresses:
76+ # 2 . Workspace could be accessed only from these IP Addresses:
9477 ip_rules = {
9578 "ip_range_1" = "10.128.0.0/16",
9679 "ip_range_2" = "10.33.0.0/16",
9780 }
9881
99- # 4 . ADLS Gen2 Mount
82+ # 3 . ADLS Gen2 Mount
10083 mountpoints = {
10184 storage_account_name = data.azurerm_storage_account.example.name
10285 container_name = "example_container"
10386 }
10487
105- # 5 . Create Secret Scope and assign permissions to custom groups
88+ # 4 . Create Secret Scope and assign permissions to custom groups
10689 secret_scope = [{
10790 scope_name = "extra-scope"
10891 acl = [{ principal = "DEVELOPERS", permission = "READ" }] # Only custom workspace group names are allowed. If left empty then only Workspace admins could access these keys
10992 secrets = [{ key = "secret-name", string_value = "secret-value"}]
11093 }]
11194
112- # 6 . SQL Warehouse Endpoint
95+ # 5 . SQL Warehouse Endpoint
11396 databricks_sql_endpoint = [{
11497 name = "default"
11598 enable_serverless_compute = true
11699 permissions = [{ group_name = "DEVELOPERS", permission_level = "CAN_USE" },]
117100 }]
118101
119- # 7 . Databricks cluster policies
102+ # 6 . Databricks cluster policies
120103 custom_cluster_policies = [{
121104 name = "custom_policy_1",
122105 can_use = "DEVELOPERS", # custom workspace group name, that is allowed to use this policy
@@ -129,7 +112,7 @@ module "databricks_runtime_premium" {
129112 }
130113 }]
131114
132- # 8 . Azure Key Vault-backed secret scope
115+ # 7 . Azure Key Vault-backed secret scope
133116 key_vault_secret_scope = [{
134117 name = "external"
135118 key_vault_id = data.azurerm_key_vault.example.id
@@ -141,27 +124,14 @@ module "databricks_runtime_premium" {
141124 }
142125}
143126
144- # 9 Assignment already existing Unity Catalog Metastore
145- module "metastore_assignment" {
146- source = "data-platform-hq/metastore-assignment/databricks"
147- version = "1.0.0"
148-
149- workspace_id = data.azurerm_databricks_workspace.example.workspace_id
150- metastore_id = "<uuid-of-metastore>"
151-
152- providers = {
153- databricks = databricks.workspace
154- }
155- }
156-
157127```
158128
159129<!-- BEGIN_TF_DOCS -->
160130## Requirements
161131
162132| Name | Version |
163133| ------| ---------|
164- | <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >= 1.0 |
134+ | <a name="requirement_terraform"></a> [terraform](#requirement\_terraform) | >=1.3 |
165135 | <a name="requirement_databricks"></a> [databricks](#requirement\_databricks) | ~>1.0 |
166136
167137## Providers
0 commit comments