Skip to content

Commit 0e32851

Browse files
authored
[Exporter] Add support for exporting of Lakeview dashboards (#3779)
## Changes

This adds support for exporting the `databricks_dashboard` resource and its dependencies. Current limitations: no support for incremental mode; the list operation fails on big lists (it is not clear whether this is a problem with the API or the Go SDK).

## Tests

- [x] `make test` run locally
- [x] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] relevant acceptance tests are passing
- [x] using Go SDK
1 parent 733c998 commit 0e32851

File tree

3 files changed

+173
-0
lines changed

3 files changed

+173
-0
lines changed

docs/guides/experimental-exporter.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -112,6 +112,7 @@ Services are just logical groups of resources used for filtering and organizatio
112112

113113
* `access` - [databricks_permissions](../resources/permissions.md), [databricks_instance_profile](../resources/instance_profile.md), [databricks_ip_access_list](../resources/ip_access_list.md), [databricks_mws_permission_assignment](../resources/mws_permission_assignment.md) and [databricks_access_control_rule_set](../resources/access_control_rule_set.md).
114114
* `compute` - **listing** [databricks_cluster](../resources/cluster.md).
115+
* `dashboards` - **listing** [databricks_dashboard](../resources/dashboard.md).
115116
* `directories` - **listing** [databricks_directory](../resources/directory.md). *Please note that directories aren't listed when running in the incremental mode! Only directories with updated notebooks will be emitted.*
116117
* `dlt` - **listing** [databricks_pipeline](../resources/pipeline.md).
117118
* `groups` - **listing** [databricks_group](../data-sources/group.md) with [membership](../resources/group_member.md) and [data access](../resources/group_instance_profile.md).
@@ -171,6 +172,7 @@ Exporter aims to generate HCL code for most of the resources within the Databric
171172
| [databricks_cluster](../resources/cluster.md) | Yes | No | Yes | No |
172173
| [databricks_cluster_policy](../resources/cluster_policy.md) | Yes | No | Yes | No |
173174
| [databricks_connection](../resources/connection.md) | Yes | Yes | Yes | No |
175+
| [databricks_dashboard](../resources/dashboard.md) | Yes | No | Yes | No |
174176
| [databricks_dbfs_file](../resources/dbfs_file.md) | Yes | No | Yes | No |
175177
| [databricks_external_location](../resources/external_location.md) | Yes | Yes | Yes | No |
176178
| [databricks_file](../resources/file.md) | Yes | No | Yes | No |

exporter/exporter_test.go

Lines changed: 77 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ import (
1515
"github.com/databricks/databricks-sdk-go/apierr"
1616
"github.com/databricks/databricks-sdk-go/service/catalog"
1717
"github.com/databricks/databricks-sdk-go/service/compute"
18+
sdk_dashboards "github.com/databricks/databricks-sdk-go/service/dashboards"
1819
"github.com/databricks/databricks-sdk-go/service/iam"
1920
sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs"
2021
"github.com/databricks/databricks-sdk-go/service/ml"
@@ -436,6 +437,13 @@ var emptyMetastoreList = qa.HTTPFixture{
436437
ReuseRequest: true,
437438
}
438439

440+
var emptyLakeviewList = qa.HTTPFixture{
441+
Method: "GET",
442+
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
443+
Response: sdk_dashboards.ListDashboardsResponse{},
444+
ReuseRequest: true,
445+
}
446+
439447
func TestImportingUsersGroupsSecretScopes(t *testing.T) {
440448
listSpFixtures := qa.ListServicePrincipalsFixtures([]iam.ServicePrincipal{
441449
{
@@ -457,6 +465,7 @@ func TestImportingUsersGroupsSecretScopes(t *testing.T) {
457465
qa.HTTPFixturesApply(t,
458466
[]qa.HTTPFixture{
459467
noCurrentMetastoreAttached,
468+
emptyLakeviewList,
460469
emptyMetastoreList,
461470
meAdminFixture,
462471
emptyRepos,
@@ -729,6 +738,7 @@ func TestImportingNoResourcesError(t *testing.T) {
729738
},
730739
},
731740
noCurrentMetastoreAttached,
741+
emptyLakeviewList,
732742
emptyMetastoreList,
733743
emptyRepos,
734744
emptyExternalLocations,
@@ -2623,3 +2633,70 @@ func TestImportingRunJobTask(t *testing.T) {
26232633
}`))
26242634
})
26252635
}
2636+
2637+
func TestImportingLakeviewDashboards(t *testing.T) {
2638+
qa.HTTPFixturesApply(t,
2639+
[]qa.HTTPFixture{
2640+
{
2641+
Method: "GET",
2642+
ReuseRequest: true,
2643+
Resource: "/api/2.0/preview/scim/v2/Me",
2644+
Response: scim.User{
2645+
Groups: []scim.ComplexValue{
2646+
{
2647+
Display: "admins",
2648+
},
2649+
},
2650+
UserName: "[email protected]",
2651+
},
2652+
},
2653+
noCurrentMetastoreAttached,
2654+
{
2655+
Method: "GET",
2656+
Resource: "/api/2.0/lakeview/dashboards?page_size=100",
2657+
Response: sdk_dashboards.ListDashboardsResponse{
2658+
Dashboards: []sdk_dashboards.Dashboard{
2659+
{
2660+
DashboardId: "9cb0c8f562624a1f",
2661+
DisplayName: "Dashboard1",
2662+
},
2663+
},
2664+
},
2665+
ReuseRequest: true,
2666+
},
2667+
{
2668+
Method: "GET",
2669+
Resource: "/api/2.0/lakeview/dashboards/9cb0c8f562624a1f?",
2670+
Response: sdk_dashboards.Dashboard{
2671+
DashboardId: "9cb0c8f562624a1f",
2672+
DisplayName: "Dashboard1",
2673+
ParentPath: "/",
2674+
Path: "/Dashboard1.lvdash.json",
2675+
SerializedDashboard: `{}`,
2676+
WarehouseId: "1234",
2677+
},
2678+
},
2679+
},
2680+
func(ctx context.Context, client *common.DatabricksClient) {
2681+
tmpDir := fmt.Sprintf("/tmp/tf-%s", qa.RandomName())
2682+
defer os.RemoveAll(tmpDir)
2683+
2684+
ic := newImportContext(client)
2685+
ic.Directory = tmpDir
2686+
ic.enableListing("dashboards")
2687+
ic.enableServices("dashboards")
2688+
2689+
err := ic.Run()
2690+
assert.NoError(t, err)
2691+
2692+
content, err := os.ReadFile(tmpDir + "/dashboards.tf")
2693+
assert.NoError(t, err)
2694+
contentStr := string(content)
2695+
assert.True(t, strings.Contains(contentStr, `resource "databricks_dashboard" "dashboard1_9cb0c8f562624a1f"`))
2696+
assert.True(t, strings.Contains(contentStr, `file_path = "${path.module}/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json"`))
2697+
content, err = os.ReadFile(tmpDir + "/dashboards/Dashboard1_9cb0c8f562624a1f.lvdash.json")
2698+
assert.NoError(t, err)
2699+
contentStr = string(content)
2700+
assert.Equal(t, `{}`, contentStr)
2701+
})
2702+
}

exporter/importables.go

Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ import (
1616
"github.com/databricks/databricks-sdk-go/apierr"
1717
"github.com/databricks/databricks-sdk-go/service/catalog"
1818
"github.com/databricks/databricks-sdk-go/service/compute"
19+
"github.com/databricks/databricks-sdk-go/service/dashboards"
1920
"github.com/databricks/databricks-sdk-go/service/iam"
2021
sdk_jobs "github.com/databricks/databricks-sdk-go/service/jobs"
2122
"github.com/databricks/databricks-sdk-go/service/ml"
@@ -1109,6 +1110,7 @@ var resourcesMap map[string]importable = map[string]importable{
11091110
{Path: "sql_alert_id", Resource: "databricks_sql_alert"},
11101111
{Path: "sql_dashboard_id", Resource: "databricks_sql_dashboard"},
11111112
{Path: "sql_endpoint_id", Resource: "databricks_sql_endpoint"},
1113+
{Path: "dashboard_id", Resource: "databricks_dashboard"},
11121114
{Path: "registered_model_id", Resource: "databricks_mlflow_model"},
11131115
{Path: "experiment_id", Resource: "databricks_mlflow_experiment"},
11141116
{Path: "repo_id", Resource: "databricks_repo"},
@@ -3091,4 +3093,96 @@ var resourcesMap map[string]importable = map[string]importable{
30913093
{Resource: "databricks_group", Path: "principal_id"},
30923094
},
30933095
},
3096+
"databricks_dashboard": {
3097+
WorkspaceLevel: true,
3098+
Service: "dashboards",
3099+
List: func(ic *importContext) error {
3100+
dashboards, err := ic.workspaceClient.Lakeview.ListAll(ic.Context, dashboards.ListDashboardsRequest{PageSize: 100})
3101+
if err != nil {
3102+
return err
3103+
}
3104+
for i, d := range dashboards {
3105+
if !ic.MatchesName(d.DisplayName) {
3106+
continue
3107+
}
3108+
// TODO: add emit for incremental mode. Use already defined functions for emitting?
3109+
ic.Emit(&resource{
3110+
Resource: "databricks_dashboard",
3111+
ID: d.DashboardId,
3112+
})
3113+
if i%100 == 0 {
3114+
log.Printf("[INFO] Processed %d dashboard out of %d", i+1, len(dashboards))
3115+
}
3116+
}
3117+
return nil
3118+
},
3119+
Name: func(ic *importContext, d *schema.ResourceData) string {
3120+
s := d.Get("parent_path").(string)
3121+
if s != "" {
3122+
s = s[1:]
3123+
if s != "" {
3124+
s = s + "_"
3125+
}
3126+
}
3127+
dname := d.Get("display_name").(string)
3128+
if dname != "" {
3129+
s = s + dname
3130+
}
3131+
s = s + "_" + d.Id()
3132+
return nameNormalizationRegex.ReplaceAllString(s, "_")
3133+
},
3134+
Import: func(ic *importContext, r *resource) error {
3135+
path := r.Data.Get("path").(string)
3136+
if strings.HasPrefix(path, "/Repos") {
3137+
ic.emitRepoByPath(path)
3138+
return nil
3139+
}
3140+
parts := strings.Split(path, "/")
3141+
plen := len(parts)
3142+
if idx := strings.Index(parts[plen-1], "."); idx != -1 {
3143+
parts[plen-1] = parts[plen-1][:idx] + "_" + r.ID + parts[plen-1][idx:]
3144+
} else {
3145+
parts[plen-1] = parts[plen-1] + "_" + r.ID
3146+
}
3147+
name := fileNameNormalizationRegex.ReplaceAllString(strings.Join(parts, "/")[1:], "_")
3148+
fileName, err := ic.saveFileIn("dashboards", name, []byte(r.Data.Get("serialized_dashboard").(string)))
3149+
if err != nil {
3150+
return err
3151+
}
3152+
r.Data.Set("file_path", fileName)
3153+
r.Data.Set("serialized_dashboard", "")
3154+
3155+
ic.emitPermissionsIfNotIgnored(r, "/dashboards/"+r.ID,
3156+
"dashboard_"+ic.Importables["databricks_dashboard"].Name(ic, r.Data))
3157+
parentPath := r.Data.Get("parent_path").(string)
3158+
if parentPath != "" && parentPath != "/" {
3159+
ic.Emit(&resource{
3160+
Resource: "databricks_directory",
3161+
ID: parentPath,
3162+
})
3163+
}
3164+
warehouseId := r.Data.Get("warehouse_id").(string)
3165+
if warehouseId != "" {
3166+
ic.Emit(&resource{
3167+
Resource: "databricks_sql_endpoint",
3168+
ID: warehouseId,
3169+
})
3170+
}
3171+
3172+
return nil
3173+
},
3174+
ShouldOmitField: func(ic *importContext, pathString string, as *schema.Schema, d *schema.ResourceData) bool {
3175+
return pathString == "dashboard_change_detected" || shouldOmitMd5Field(ic, pathString, as, d)
3176+
},
3177+
Ignore: func(ic *importContext, r *resource) bool {
3178+
return strings.HasPrefix(r.Data.Get("path").(string), "/Repos") || strings.HasPrefix(r.Data.Get("parent_path").(string), "/Repos")
3179+
},
3180+
Depends: []reference{
3181+
{Path: "file_path", File: true},
3182+
{Path: "warehouse_id", Resource: "databricks_sql_endpoint"},
3183+
{Path: "parent_path", Resource: "databricks_directory"},
3184+
{Path: "parent_path", Resource: "databricks_user", Match: "home"},
3185+
{Path: "parent_path", Resource: "databricks_service_principal"},
3186+
},
3187+
},
30943188
}

0 commit comments

Comments
 (0)