Skip to content

Commit b9fb47c

Browse files
alexott and mgyucht authored
[Feature] Add databricks_alert resource to replace databricks_sql_alert (#4051)
## Changes <!-- Summary of your changes that are easy to understand --> The new resource uses the [new Alerts API](https://docs.databricks.com/api/workspace/alerts/create) instead of the legacy one that will be deprecated. Since the new resource has a slightly different set of parameters, it was decided to create a new resource and deprecate the old one. This resource uses old TF SDK to be compatible with TF exporter (until #4050 is implemented). TODOs: - Need to discuss how to handle permissions - `sql_alert` permissions look like working, but not sure if we should continue to use that API - Support in the exporter will be in a separate PR ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] `make test` run locally - [x] relevant change in `docs/` folder - [x] covered with integration tests in `internal/acceptance` - [x] relevant acceptance tests are passing - [x] using Go SDK --------- Co-authored-by: Miles Yucht <[email protected]>
1 parent 6112713 commit b9fb47c

File tree

11 files changed

+698
-3
lines changed

11 files changed

+698
-3
lines changed

common/resource.go

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -443,13 +443,20 @@ func genericDatabricksData[T, P, C any](
443443
// WorkspacePathPrefixDiffSuppress suppresses diffs for workspace paths where both sides
444444
// may or may not include the `/Workspace` prefix.
445445
//
446-
// This is the case for dashboards where at create time, the user may include the `/Workspace`
446+
// This is the case for dashboards, alerts and queries where at create time, the user may include the `/Workspace`
447447
// prefix for the `parent_path` field, but the read response will not include the prefix.
448448
func WorkspacePathPrefixDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
449449
const prefix = "/Workspace"
450450
return strings.TrimPrefix(old, prefix) == strings.TrimPrefix(new, prefix)
451451
}
452452

453+
// WorkspaceOrEmptyPathPrefixDiffSuppress is similar to WorkspacePathPrefixDiffSuppress but also suppresses diffs
454+
// when the new value is empty (not specified by user).
455+
func WorkspaceOrEmptyPathPrefixDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
456+
const prefix = "/Workspace"
457+
return (old != "" && new == "") || strings.TrimPrefix(old, prefix) == strings.TrimPrefix(new, prefix)
458+
}
459+
453460
func EqualFoldDiffSuppress(k, old, new string, d *schema.ResourceData) bool {
454461
if strings.EqualFold(old, new) {
455462
log.Printf("[INFO] Suppressing diff on %s", k)

common/resource_test.go

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -187,6 +187,15 @@ func TestWorkspacePathPrefixDiffSuppress(t *testing.T) {
187187
assert.False(t, WorkspacePathPrefixDiffSuppress("k", "/Workspace/1", "/Workspace/2", nil))
188188
}
189189

190+
func TestWorkspaceOrEmptyPathPrefixDiffSuppress(t *testing.T) {
191+
assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/foo/bar", "/Workspace/foo/bar", nil))
192+
assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/foo/bar", "/foo/bar", nil))
193+
assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "/Workspace/foo/bar", nil))
194+
assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "/foo/bar", nil))
195+
assert.True(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/foo/bar", "", nil))
196+
assert.False(t, WorkspaceOrEmptyPathPrefixDiffSuppress("k", "/Workspace/1", "/Workspace/2", nil))
197+
}
198+
190199
func TestEqualFoldDiffSuppress(t *testing.T) {
191200
assert.True(t, EqualFoldDiffSuppress("k", "A", "a", nil))
192201
assert.False(t, EqualFoldDiffSuppress("k", "A", "A2", nil))

docs/resources/alert.md

Lines changed: 196 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,196 @@
1+
---
2+
subcategory: "Databricks SQL"
3+
---
4+
# databricks_alert Resource
5+
6+
This resource allows you to manage [Databricks SQL Alerts](https://docs.databricks.com/en/sql/user/alerts/index.html). It supersedes [databricks_sql_alert](sql_alert.md) resource - see migration guide below for more details.
7+
8+
## Example Usage
9+
10+
```hcl
11+
resource "databricks_directory" "shared_dir" {
12+
path = "/Shared/Queries"
13+
}
14+
15+
# This will be replaced with new databricks_query resource
16+
resource "databricks_sql_query" "this" {
17+
data_source_id = databricks_sql_endpoint.example.data_source_id
18+
name = "My Query Name"
19+
query = "SELECT 42 as value"
20+
parent = "folders/${databricks_directory.shared_dir.object_id}"
21+
}
22+
23+
resource "databricks_alert" "alert" {
24+
query_id = databricks_sql_query.this.id
25+
display_name = "TF new alert"
26+
parent_path = databricks_directory.shared_dir.path
27+
condition {
28+
op = "GREATER_THAN"
29+
operand {
30+
column {
31+
name = "value"
32+
}
33+
}
34+
threshold {
35+
value {
36+
double_value = 42
37+
}
38+
}
39+
}
40+
}
41+
```
42+
43+
## Argument Reference
44+
45+
The following arguments are available:
46+
47+
* `query_id` - (Required, String) ID of the query evaluated by the alert.
48+
* `display_name` - (Required, String) Name of the alert.
49+
* `condition` - (Required) Trigger conditions of the alert. Block consists of the following attributes:
50+
* `op` - (Required, String Enum) Operator used for comparison in alert evaluation. (Enum: `GREATER_THAN`, `GREATER_THAN_OR_EQUAL`, `LESS_THAN`, `LESS_THAN_OR_EQUAL`, `EQUAL`, `NOT_EQUAL`, `IS_NULL`)
51+
* `operand` - (Required, Block) Name of the column from the query result to use for comparison in alert evaluation:
52+
* `column` - (Required, Block) Block describing the column from the query result to use for comparison in alert evaluation:
53+
* `name` - (Required, String) Name of the column.
54+
* `threshold` - (Optional for `IS_NULL` operation, Block) Threshold value used for comparison in alert evaluation:
55+
* `value` - (Required, Block) actual value used in comparison (one of the attributes is required):
56+
* `string_value` - string value to compare against string results.
57+
* `double_value` - double value to compare against integer and double results.
58+
* `bool_value` - boolean value (`true` or `false`) to compare against boolean results.
59+
* `empty_result_state` - (Optional, String Enum) Alert state if the result is empty (`UNKNOWN`, `OK`, `TRIGGERED`)
60+
* `custom_subject` - (Optional, String) Custom subject of alert notification, if it exists. This includes email subject, Slack notification header, etc. See [Alerts API reference](https://docs.databricks.com/en/sql/user/alerts/index.html) for custom templating instructions.
61+
* `custom_body` - (Optional, String) Custom body of alert notification, if it exists. See [Alerts API reference](https://docs.databricks.com/en/sql/user/alerts/index.html) for custom templating instructions.
62+
* `parent_path` - (Optional, String) The path to a workspace folder containing the alert. The default is the user's home folder. If changed, the alert will be recreated.
63+
* `seconds_to_retrigger` - (Optional, Integer) Number of seconds an alert must wait after being triggered to rearm itself. After rearming, it can be triggered again. If 0 or not specified, the alert will not be triggered again.
64+
* `owner_user_name` - (Optional, String) Alert owner's username.
65+
* `notify_on_ok` - (Optional, Boolean) Whether to notify alert subscribers when alert returns back to normal.
66+
67+
## Attribute Reference
68+
69+
In addition to all the arguments above, the following attributes are exported:
70+
71+
* `id` - unique ID of the Alert.
72+
* `lifecycle_state` - The workspace state of the alert. Used for tracking trashed status. (Possible values are `ACTIVE` or `TRASHED`).
73+
* `state` - Current state of the alert's trigger status (`UNKNOWN`, `OK`, `TRIGGERED`). This field is set to `UNKNOWN` if the alert has not yet been evaluated or ran into an error during the last evaluation.
74+
* `create_time` - The timestamp string indicating when the alert was created.
75+
* `update_time` - The timestamp string indicating when the alert was updated.
76+
* `trigger_time` - The timestamp string when the alert was last triggered if the alert has been triggered before.
77+
78+
## Migrating from `databricks_sql_alert` resource
79+
80+
Under the hood, the new resource uses the same data as the `databricks_sql_alert`, but is exposed via a different API. This means that we can migrate existing alerts without recreating them. This operation is done in a few steps:
81+
82+
* Record the ID of existing `databricks_sql_alert`, for example, by executing the `terraform state show databricks_sql_alert.alert` command.
83+
* Create the code for the new implementation by performing the following changes:
84+
* the `name` attribute is now named `display_name`
85+
* the `parent` (if exists) is renamed to `parent_path` attribute and should be converted from `folders/object_id` to the actual path.
86+
* the `options` block is converted into the `condition` block with the following changes:
87+
* the value of the `op` attribute should be converted from a mathematical operator into a string name, like, `>` is becoming `GREATER_THAN`, `==` is becoming `EQUAL`, etc.
88+
* the `column` attribute is becoming the `operand` block
89+
* the `value` attribute is becoming the `threshold` block. **Please note that the old implementation always used strings so you may have changes after import if you use `double_value` or `bool_value` inside the block.**
90+
* the `rearm` attribute is renamed to `seconds_to_retrigger`.
91+
92+
For example, if we have the original `databricks_sql_alert` defined as:
93+
94+
```hcl
95+
resource "databricks_sql_alert" "alert" {
96+
query_id = databricks_sql_query.this.id
97+
name = "My Alert"
98+
parent = "folders/${databricks_directory.shared_dir.object_id}"
99+
options {
100+
column = "value"
101+
op = ">"
102+
value = "42"
103+
muted = false
104+
}
105+
}
106+
```
107+
108+
we'll have a new resource defined as:
109+
110+
```hcl
111+
resource "databricks_alert" "alert" {
112+
query_id = databricks_sql_query.this.id
113+
display_name = "My Alert"
114+
parent_path = databricks_directory.shared_dir.path
115+
condition {
116+
op = "GREATER_THAN"
117+
operand {
118+
column {
119+
name = "value"
120+
}
121+
}
122+
threshold {
123+
value {
124+
double_value = 42
125+
}
126+
}
127+
}
128+
}
129+
```
130+
131+
### For Terraform version >= 1.7.0
132+
133+
Terraform 1.7 introduced the [removed](https://developer.hashicorp.com/terraform/language/resources/syntax#removing-resources) block in addition to the [import](https://developer.hashicorp.com/terraform/language/import) block introduced in Terraform 1.5. Together they make import and removal of resources easier, avoiding manual execution of `terraform import` and `terraform state rm` commands.
134+
135+
So with Terraform 1.7+, the migration looks as the following:
136+
137+
* remove the old alert definition and replace it with the new one.
138+
* Adjust references, like, `databricks_permissions`.
139+
* Add `import` and `removed` blocks like this:
140+
141+
```hcl
142+
import {
143+
to = databricks_alert.alert
144+
id = "<alert-id>"
145+
}
146+
147+
removed {
148+
from = databricks_sql_alert.alert
149+
150+
lifecycle {
151+
destroy = false
152+
}
153+
}
154+
```
155+
156+
* Run the `terraform plan` command to check possible changes, such as value type change, etc.
157+
* Run the `terraform apply` command to apply changes.
158+
* Remove the `import` and `removed` blocks from the code.
159+
160+
### For Terraform version < 1.7.0
161+
162+
* Remove the old alert definition and replace it with the new one.
163+
* Remove the old resource from the state with the `terraform state rm databricks_sql_alert.alert` command.
164+
* Import new resource with the `terraform import databricks_alert.alert <alert-id>` command.
165+
* Adjust references, like, `databricks_permissions`.
166+
* Run the `terraform plan` command to check possible changes, such as value type change, etc.
167+
168+
## Access Control
169+
170+
[databricks_permissions](permissions.md#sql-alert-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual alerts.
171+
172+
```hcl
173+
resource "databricks_permissions" "alert_usage" {
174+
sql_alert_id = databricks_alert.alert.id
175+
access_control {
176+
group_name = "users"
177+
permission_level = "CAN_RUN"
178+
}
179+
}
180+
```
181+
182+
## Import
183+
184+
This resource can be imported using alert ID:
185+
186+
```bash
187+
terraform import databricks_alert.this <alert-id>
188+
```
189+
190+
## Related Resources
191+
192+
The following resources are often used in the same context:
193+
194+
* [databricks_sql_query](sql_query.md) to manage Databricks SQL [Queries](https://docs.databricks.com/sql/user/queries/index.html).
195+
* [databricks_sql_endpoint](sql_endpoint.md) to manage Databricks SQL [Endpoints](https://docs.databricks.com/sql/admin/sql-endpoints.html).
196+
* [databricks_directory](directory.md) to manage directories in [Databricks Workspace](https://docs.databricks.com/workspace/workspace-objects.html).

docs/resources/sql_alert.md

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -58,6 +58,18 @@ In addition to all arguments above, the following attributes are exported:
5858

5959
* `id` - unique ID of the SQL Alert.
6060

61+
## Access Control
62+
63+
[databricks_permissions](permissions.md#sql-alert-usage) can control which groups or individual users can *Manage*, *Edit*, *Run* or *View* individual alerts.
64+
65+
## Import
66+
67+
This resource can be imported using alert ID:
68+
69+
```bash
70+
terraform import databricks_sql_alert.this <alert-id>
71+
```
72+
6173
## Related Resources
6274

6375
The following resources are often used in the same context:

internal/acceptance/alert_test.go

Lines changed: 60 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,60 @@
1+
package acceptance
2+
3+
import (
4+
"testing"
5+
)
6+
7+
func TestAccAlert(t *testing.T) {
8+
WorkspaceLevel(t, Step{
9+
Template: `
10+
resource "databricks_sql_query" "this" {
11+
data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}"
12+
name = "tf-{var.RANDOM}"
13+
query = "SELECT 1 AS p1, 2 as p2"
14+
}
15+
16+
resource "databricks_alert" "alert" {
17+
query_id = databricks_sql_query.this.id
18+
display_name = "tf-alert-{var.RANDOM}"
19+
condition {
20+
op = "EQUAL"
21+
operand {
22+
column {
23+
name = "p2"
24+
}
25+
}
26+
threshold {
27+
value {
28+
double_value = 2
29+
}
30+
}
31+
}
32+
}
33+
`,
34+
}, Step{
35+
Template: `
36+
resource "databricks_sql_query" "this" {
37+
data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}"
38+
name = "tf-{var.RANDOM}"
39+
query = "SELECT 1 AS p1, 2 as p2"
40+
}
41+
42+
resource "databricks_alert" "alert" {
43+
query_id = databricks_sql_query.this.id
44+
display_name = "tf-alert-{var.RANDOM}"
45+
condition {
46+
op = "GREATER_THAN"
47+
operand {
48+
column {
49+
name = "p2"
50+
}
51+
}
52+
threshold {
53+
value {
54+
double_value = 3
55+
}
56+
}
57+
}
58+
}`,
59+
})
60+
}

internal/acceptance/permissions_test.go

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -837,3 +837,42 @@ func TestAccPermissions_ServingEndpoint(t *testing.T) {
837837
ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for serving-endpoint, allowed levels: CAN_MANAGE"),
838838
})
839839
}
840+
841+
func TestAccPermissions_Alert(t *testing.T) {
842+
loadDebugEnvIfRunsFromIDE(t, "workspace")
843+
alertTemplate := `
844+
resource "databricks_sql_query" "this" {
845+
name = "{var.STICKY_RANDOM}-query"
846+
query = "SELECT 1 AS p1, 2 as p2"
847+
data_source_id = "{env.TEST_DEFAULT_WAREHOUSE_DATASOURCE_ID}"
848+
}
849+
850+
resource "databricks_alert" "this" {
851+
query_id = databricks_sql_query.this.id
852+
display_name = "{var.STICKY_RANDOM}-alert"
853+
condition {
854+
op = "GREATER_THAN"
855+
operand {
856+
column {
857+
name = "value"
858+
}
859+
}
860+
threshold {
861+
value {
862+
double_value = 42
863+
}
864+
}
865+
}
866+
}
867+
`
868+
WorkspaceLevel(t, Step{
869+
Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id", groupPermissions("CAN_VIEW")),
870+
}, Step{
871+
Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id",
872+
currentPrincipalPermission(t, "CAN_MANAGE"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")),
873+
}, Step{
874+
Template: alertTemplate + makePermissionsTestStage("sql_alert_id", "databricks_alert.this.id",
875+
currentPrincipalPermission(t, "CAN_VIEW"), groupPermissions("CAN_VIEW", "CAN_EDIT", "CAN_RUN", "CAN_MANAGE")),
876+
ExpectError: regexp.MustCompile("cannot remove management permissions for the current user for alert, allowed levels: CAN_MANAGE"),
877+
})
878+
}

internal/acceptance/sql_alert_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import (
44
"testing"
55
)
66

7-
func TestAccAlert(t *testing.T) {
7+
func TestAccSqlAlert(t *testing.T) {
88
WorkspaceLevel(t, Step{
99
Template: `
1010
resource "databricks_sql_query" "this" {

internal/providers/sdkv2/sdkv2.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,7 @@ func DatabricksProvider() *schema.Provider {
128128
},
129129
ResourcesMap: map[string]*schema.Resource{ // must be in alphabetical order
130130
"databricks_access_control_rule_set": permissions.ResourceAccessControlRuleSet().ToResource(),
131+
"databricks_alert": sql.ResourceAlert().ToResource(),
131132
"databricks_artifact_allowlist": catalog.ResourceArtifactAllowlist().ToResource(),
132133
"databricks_aws_s3_mount": storage.ResourceAWSS3Mount().ToResource(),
133134
"databricks_azure_adls_gen1_mount": storage.ResourceAzureAdlsGen1Mount().ToResource(),

0 commit comments

Comments
 (0)