Skip to content

Commit faa6a89

Browse files
authored
[Exporter] **Breaking change** Use new query and alert resources instead of legacy resources (#4150)
## Changes This change replaces legacy `databricks_sql_query` and `databricks_sql_alert` with new resources `databricks_query` and `databricks_alert`. Also, services `sql-queries` and `sql-alerts` are renamed to `queries` and `alerts`. Other changes include: * Improve performance of Lakeview dashboards scan by using a bigger page size * Generalize `isMatchingCatalogAndSchema` implementation for use in multiple resources where attribute names could be different * Generalize handling of the `/Workspace` prefix when emitting notebooks, workspace files and directories. ## Tests - [x] `make test` run locally - [x] relevant change in `docs/` folder - [ ] covered with integration tests in `internal/acceptance` - [ ] relevant acceptance tests are passing - [x] using Go SDK
1 parent 8b00572 commit faa6a89

15 files changed

+295
-373
lines changed

docs/guides/experimental-exporter.md

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -111,6 +111,7 @@ Services are just logical groups of resources used for filtering and organizatio
111111
Please note that for services not marked with **listing**, we'll export resources only if they are referenced from other resources.
112112

113113
* `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md).
114+
* `alerts` - **listing** [databricks_alert](../resources/alert.md).
114115
* `compute` - **listing** [databricks_cluster](../resources/cluster.md).
115116
* `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md).
116117
* `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in the incremental mode! Only directories with updated notebooks will be emitted.*
@@ -123,13 +124,12 @@ Services are just logical groups of resources used for filtering and organizatio
123124
* `notebooks` - **listing** [databricks_notebook](../resources/notebook.md).
124125
* `policies` - **listing** [databricks_cluster_policy](../resources/cluster_policy).
125126
* `pools` - **listing** [instance pools](../resources/instance_pool.md).
127+
* `queries` - **listing** [databricks_query](../resources/query.md).
126128
* `repos` - **listing** [databricks_repo](../resources/repo.md)
127129
* `secrets` - **listing** [databricks_secret_scope](../resources/secret_scope.md) along with [keys](../resources/secret.md) and [ACLs](../resources/secret_acl.md).
128130
* `settings` - **listing** [databricks_notification_destination](../resources/notification_destination.md).
129-
* `sql-alerts` - **listing** [databricks_sql_alert](../resources/sql_alert.md).
130-
* `sql-dashboards` - **listing** [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md).
131+
* `sql-dashboards` - **listing** Legacy [databricks_sql_dashboard](../resources/sql_dashboard.md) along with associated [databricks_sql_widget](../resources/sql_widget.md) and [databricks_sql_visualization](../resources/sql_visualization.md).
131132
* `sql-endpoints` - **listing** [databricks_sql_endpoint](../resources/sql_endpoint.md) along with [databricks_sql_global_config](../resources/sql_global_config.md).
132-
* `sql-queries` - **listing** [databricks_sql_query](../resources/sql_query.md).
133133
* `storage` - only [databricks_dbfs_file](../resources/dbfs_file.md) and [databricks_file](../resources/file.md) referenced in other resources (libraries, init scripts, ...) will be downloaded locally and properly arranged into terraform state.
134134
* `uc-artifact-allowlist` - **listing** exports [databricks_artifact_allowlist](../resources/artifact_allowlist.md) resources for Unity Catalog Allow Lists attached to the current metastore.
135135
* `uc-catalogs` - **listing** [databricks_catalog](../resources/catalog.md) and [databricks_workspace_binding](../resources/workspace_binding.md)

exporter/context.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -204,8 +204,8 @@ var goroutinesNumber = map[string]int{
204204
"databricks_sql_dashboard": 3,
205205
"databricks_sql_widget": 4,
206206
"databricks_sql_visualization": 4,
207-
"databricks_sql_query": 5,
208-
"databricks_sql_alert": 2,
207+
"databricks_query": 4,
208+
"databricks_alert": 2,
209209
"databricks_permissions": 11,
210210
}
211211

exporter/exporter_test.go

Lines changed: 55 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -56,20 +56,6 @@ func getJSONObject(filename string) any {
5656
return obj
5757
}
5858

59-
func getJSONArray(filename string) any {
60-
data, err := os.ReadFile(filename)
61-
if err != nil {
62-
panic(err)
63-
}
64-
var obj []any
65-
err = json.Unmarshal(data, &obj)
66-
if err != nil {
67-
fmt.Printf("[ERROR] error! file=%s err=%v\n", filename, err)
68-
fmt.Printf("[ERROR] data=%s\n", string(data))
69-
}
70-
return obj
71-
}
72-
7359
func workspaceConfKeysToURL() string {
7460
keys := make([]string, 0, len(workspaceConfKeys))
7561
for k := range workspaceConfKeys {
@@ -379,14 +365,14 @@ var emptySqlDashboards = qa.HTTPFixture{
379365

380366
var emptySqlQueries = qa.HTTPFixture{
381367
Method: "GET",
382-
Resource: "/api/2.0/preview/sql/queries?page_size=100",
368+
Resource: "/api/2.0/sql/queries?page_size=100",
383369
Response: map[string]any{},
384370
ReuseRequest: true,
385371
}
386372

387373
var emptySqlAlerts = qa.HTTPFixture{
388374
Method: "GET",
389-
Resource: "/api/2.0/preview/sql/alerts",
375+
Resource: "/api/2.0/sql/alerts?page_size=100",
390376
Response: []tfsql.AlertEntity{},
391377
ReuseRequest: true,
392378
}
@@ -447,7 +433,7 @@ var emptyMetastoreList = qa.HTTPFixture{
447433

448434
var emptyLakeviewList = qa.HTTPFixture{
449435
Method: "GET",
450-
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
436+
Resource: "/api/2.0/lakeview/dashboards?page_size=1000",
451437
Response: sdk_dashboards.ListDashboardsResponse{},
452438
ReuseRequest: true,
453439
}
@@ -1015,6 +1001,16 @@ func TestImportingClusters(t *testing.T) {
10151001
},
10161002
},
10171003
},
1004+
{
1005+
Method: "GET",
1006+
Resource: "/api/2.0/preview/scim/v2/Users?attributes=id%2CuserName&count=100&startIndex=1",
1007+
ReuseRequest: true,
1008+
Response: scim.UserList{
1009+
Resources: []scim.User{
1010+
{ID: "123", DisplayName: "[email protected]", UserName: "[email protected]"},
1011+
},
1012+
},
1013+
},
10181014
},
10191015
func(ctx context.Context, client *common.DatabricksClient) {
10201016
os.Setenv("EXPORTER_PARALLELISM_default", "1")
@@ -1950,16 +1946,21 @@ func TestImportingSqlObjects(t *testing.T) {
19501946
},
19511947
{
19521948
Method: "GET",
1953-
Resource: "/api/2.0/preview/sql/queries?page_size=100",
1954-
Response: getJSONObject("test-data/get-sql-queries.json"),
1949+
Resource: "/api/2.0/sql/queries?page_size=100",
1950+
Response: getJSONObject("test-data/get-queries.json"),
19551951
ReuseRequest: true,
19561952
},
19571953
{
19581954
Method: "GET",
1959-
Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc",
1960-
Response: getJSONObject("test-data/get-sql-query.json"),
1955+
Resource: "/api/2.0/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?",
1956+
Response: getJSONObject("test-data/get-query.json"),
19611957
ReuseRequest: true,
19621958
},
1959+
{
1960+
Method: "GET",
1961+
Resource: "/api/2.0/preview/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc",
1962+
Response: getJSONObject("test-data/get-sql-query.json"),
1963+
},
19631964
{
19641965
Method: "GET",
19651966
Resource: "/api/2.0/permissions/sql/queries/16c4f969-eea0-4aad-8f82-03d79b078dcc?",
@@ -1972,14 +1973,14 @@ func TestImportingSqlObjects(t *testing.T) {
19721973
},
19731974
{
19741975
Method: "GET",
1975-
Resource: "/api/2.0/preview/sql/alerts",
1976-
Response: getJSONArray("test-data/get-sql-alerts.json"),
1976+
Resource: "/api/2.0/sql/alerts?page_size=100",
1977+
Response: getJSONObject("test-data/get-alerts.json"),
19771978
ReuseRequest: true,
19781979
},
19791980
{
19801981
Method: "GET",
1981-
Resource: "/api/2.0/preview/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?",
1982-
Response: getJSONObject("test-data/get-sql-alert.json"),
1982+
Resource: "/api/2.0/sql/alerts/3cf91a42-6217-4f3c-a6f0-345d489051b9?",
1983+
Response: getJSONObject("test-data/get-alert.json"),
19831984
},
19841985
{
19851986
Method: "GET",
@@ -1993,18 +1994,44 @@ func TestImportingSqlObjects(t *testing.T) {
19931994

19941995
ic := newImportContext(client)
19951996
ic.Directory = tmpDir
1996-
ic.enableListing("sql-dashboards,sql-queries,sql-endpoints,sql-alerts")
1997-
ic.enableServices("sql-dashboards,sql-queries,sql-alerts,sql-endpoints,access,notebooks")
1997+
ic.enableListing("sql-dashboards,queries,sql-endpoints,alerts")
1998+
ic.enableServices("sql-dashboards,queries,alerts,sql-endpoints,access")
19981999

19992000
err := ic.Run()
20002001
assert.NoError(t, err)
20012002

2003+
// check the generated HCL for SQL Warehouses
20022004
content, err := os.ReadFile(tmpDir + "/sql-endpoints.tf")
20032005
assert.NoError(t, err)
20042006
contentStr := string(content)
20052007
assert.True(t, strings.Contains(contentStr, `enable_serverless_compute = false`))
20062008
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_endpoint" "test" {`))
20072009
assert.False(t, strings.Contains(contentStr, `tags {`))
2010+
// check the generated HCL for SQL Dashboards
2011+
content, err = os.ReadFile(tmpDir + "/sql-dashboards.tf")
2012+
assert.NoError(t, err)
2013+
contentStr = string(content)
2014+
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_dashboard" "test_9cb0c8f5_6262_4a1f_a741_2181de76028f" {`))
2015+
assert.True(t, strings.Contains(contentStr, `dashboard_id = databricks_sql_dashboard.test_9cb0c8f5_6262_4a1f_a741_2181de76028f.id`))
2016+
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_widget" "rd4dd2082685" {`))
2017+
assert.True(t, strings.Contains(contentStr, `resource "databricks_sql_visualization" "chart_16c4f969_eea0_4aad_8f82_03d79b078dcc_1a062d3a_eefe_11eb_9559_dc7cd9c86087"`))
2018+
// check the generated HCL for Queries
2019+
content, err = os.ReadFile(tmpDir + "/queries.tf")
2020+
assert.NoError(t, err)
2021+
contentStr = string(content)
2022+
assert.True(t, strings.Contains(contentStr, `resource "databricks_query" "jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc"`))
2023+
assert.True(t, strings.Contains(contentStr, `warehouse_id = databricks_sql_endpoint.test.id`))
2024+
assert.True(t, strings.Contains(contentStr, `owner_user_name = "[email protected]"`))
2025+
assert.True(t, strings.Contains(contentStr, `display_name = "Jobs per day per status last 30 days"`))
2026+
// check the generated HCL for Alerts
2027+
content, err = os.ReadFile(tmpDir + "/alerts.tf")
2028+
assert.NoError(t, err)
2029+
contentStr = string(content)
2030+
assert.True(t, strings.Contains(contentStr, `resource "databricks_alert" "test_alert_3cf91a42_6217_4f3c_a6f0_345d489051b9"`))
2031+
assert.True(t, strings.Contains(contentStr, `query_id = databricks_query.jobs_per_day_per_status_last_30_days_16c4f969_eea0_4aad_8f82_03d79b078dcc.id`))
2032+
assert.True(t, strings.Contains(contentStr, `display_name = "Test Alert"`))
2033+
assert.True(t, strings.Contains(contentStr, `op = "GREATER_THAN"`))
2034+
assert.True(t, strings.Contains(contentStr, `owner_user_name = "[email protected]"`))
20082035
})
20092036
}
20102037

@@ -2795,7 +2822,7 @@ func TestImportingLakeviewDashboards(t *testing.T) {
27952822
noCurrentMetastoreAttached,
27962823
{
27972824
Method: "GET",
2798-
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
2825+
Resource: "/api/2.0/lakeview/dashboards?page_size=1000",
27992826
Response: sdk_dashboards.ListDashboardsResponse{
28002827
Dashboards: []sdk_dashboards.Dashboard{
28012828
{

0 commit comments

Comments
 (0)