
Commit c409696

Merge branch 'main' into init_on_databricks
2 parents 320182d + ca6332a

File tree: 10 files changed (+316, −28 lines)

bundle/config/root.go

Lines changed: 11 additions & 1 deletion
@@ -433,10 +433,20 @@ func rewriteShorthands(v dyn.Value) (dyn.Value, error) {
 		}, variable.Locations()), nil
 
 	case dyn.KindMap, dyn.KindSequence:
+		lookup, err := dyn.Get(variable, "lookup")
+		// If lookup is set, we return the variable as is without rewriting it.
+		if err == nil && lookup.Kind() != dyn.KindInvalid {
+			return variable, nil
+		}
+
 		// Check if the original definition of variable has a type field.
+		// Type might not be found if the variable is overridden in a separate
+		// file and the configuration is not merged yet.
 		typeV, err := dyn.GetByPath(v, p.Append(dyn.Key("type")))
 		if err != nil {
-			return variable, nil
+			return dyn.NewValue(map[string]dyn.Value{
+				"default": variable,
+			}, variable.Locations()), nil
 		}
 
 		if typeV.MustString() == "complex" {
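
Net effect of this hunk: a variable that defines a lookup is now passed through untouched, and a map- or sequence-valued variable whose type field cannot be found yet (for example, because the override lives in a file that has not been merged) is wrapped under "default" instead of being returned unmodified. Below is a minimal standalone sketch of the new short-circuit, assuming only the dyn API used in the hunk above; dyn.V as the literal value constructor is an assumption based on the package's usage elsewhere in this repo.

package main

import (
	"fmt"

	"github.com/databricks/cli/libs/dyn"
)

func main() {
	// A lookup variable, roughly as it appears after YAML loading
	// (assumed shape for illustration).
	variable := dyn.V(map[string]dyn.Value{
		"lookup": dyn.V(map[string]dyn.Value{"cluster": dyn.V("some-cluster")}),
	})

	// The new check in rewriteShorthands: when "lookup" resolves to a valid
	// value, the variable is returned as is and never wrapped under "default".
	lookup, err := dyn.Get(variable, "lookup")
	fmt.Println(err == nil && lookup.Kind() != dyn.KindInvalid) // true
}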

bundle/phases/deploy.go

Lines changed: 66 additions & 26 deletions
@@ -19,55 +19,95 @@ import (
 	"github.com/databricks/cli/bundle/scripts"
 	"github.com/databricks/cli/libs/cmdio"
 	terraformlib "github.com/databricks/cli/libs/terraform"
+	tfjson "github.com/hashicorp/terraform-json"
 )
 
-func approvalForUcSchemaDelete(ctx context.Context, b *bundle.Bundle) (bool, error) {
-	tf := b.Terraform
-	if tf == nil {
-		return false, fmt.Errorf("terraform not initialized")
-	}
-
-	// read plan file
-	plan, err := tf.ShowPlanFile(ctx, b.Plan.Path)
-	if err != nil {
-		return false, err
-	}
-
-	actions := make([]terraformlib.Action, 0)
-	for _, rc := range plan.ResourceChanges {
-		// We only care about destructive actions on UC schema resources.
-		if rc.Type != "databricks_schema" {
+func parseTerraformActions(changes []*tfjson.ResourceChange, toInclude func(typ string, actions tfjson.Actions) bool) []terraformlib.Action {
+	res := make([]terraformlib.Action, 0)
+	for _, rc := range changes {
+		if !toInclude(rc.Type, rc.Change.Actions) {
 			continue
 		}
 
 		var actionType terraformlib.ActionType
-
 		switch {
 		case rc.Change.Actions.Delete():
 			actionType = terraformlib.ActionTypeDelete
 		case rc.Change.Actions.Replace():
 			actionType = terraformlib.ActionTypeRecreate
 		default:
-			// We don't need a prompt for non-destructive actions like creating
-			// or updating a schema.
+			// No use case for other action types yet.
 			continue
 		}
 
-		actions = append(actions, terraformlib.Action{
+		res = append(res, terraformlib.Action{
 			Action:       actionType,
 			ResourceType: rc.Type,
 			ResourceName: rc.Name,
 		})
 	}
 
-	// No restricted actions planned. No need for approval.
-	if len(actions) == 0 {
+	return res
+}
+
+func approvalForDeploy(ctx context.Context, b *bundle.Bundle) (bool, error) {
+	tf := b.Terraform
+	if tf == nil {
+		return false, fmt.Errorf("terraform not initialized")
+	}
+
+	// read plan file
+	plan, err := tf.ShowPlanFile(ctx, b.Plan.Path)
+	if err != nil {
+		return false, err
+	}
+
+	schemaActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
+		// Filter in only UC schema resources.
+		if typ != "databricks_schema" {
+			return false
+		}
+
+		// We only display prompts for destructive actions like deleting or
+		// recreating a schema.
+		return actions.Delete() || actions.Replace()
+	})
+
+	dltActions := parseTerraformActions(plan.ResourceChanges, func(typ string, actions tfjson.Actions) bool {
+		// Filter in only DLT pipeline resources.
+		if typ != "databricks_pipeline" {
+			return false
+		}
+
+		// Recreating a DLT pipeline leads to metadata loss, and for a transient
+		// period the underlying tables will be unavailable.
+		return actions.Replace() || actions.Delete()
+	})
+
+	// We don't need to display any prompts in this case.
+	if len(dltActions) == 0 && len(schemaActions) == 0 {
 		return true, nil
 	}
 
-	cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data may be lost:")
-	for _, action := range actions {
-		cmdio.Log(ctx, action)
+	// One or more UC schema resources will be deleted or recreated.
+	if len(schemaActions) != 0 {
+		cmdio.LogString(ctx, "The following UC schemas will be deleted or recreated. Any underlying data may be lost:")
+		for _, action := range schemaActions {
+			cmdio.Log(ctx, action)
+		}
+	}
+
+	// One or more DLT pipelines will be deleted or recreated.
+	if len(dltActions) != 0 {
+		msg := `
+This action will result in the deletion or recreation of the following DLT Pipelines along with the
+Streaming Tables (STs) and Materialized Views (MVs) managed by them. Recreating the Pipelines will
+restore the defined STs and MVs through full refresh. Note that recreation is necessary when pipeline
+properties such as the 'catalog' or 'storage' are changed:`
+		cmdio.LogString(ctx, msg)
+		for _, action := range dltActions {
+			cmdio.Log(ctx, action)
+		}
 	}
 
 	if b.AutoApprove {
@@ -126,7 +166,7 @@ func Deploy() bundle.Mutator {
 		terraform.CheckRunningResource(),
 		terraform.Plan(terraform.PlanGoal("deploy")),
 		bundle.If(
-			approvalForUcSchemaDelete,
+			approvalForDeploy,
 			deployCore,
 			bundle.LogString("Deployment cancelled!"),
 		),
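
The switch in parseTerraformActions relies on how tfjson.Actions classifies a planned change: a lone delete reports Delete(), while a delete/create pair reports Replace(). A small standalone sketch (not part of this commit) of the two predicates:

package main

import (
	"fmt"

	tfjson "github.com/hashicorp/terraform-json"
)

func main() {
	// A [delete, create] pair is a replace (recreate), not a plain delete.
	recreate := tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate}
	fmt.Println(recreate.Replace(), recreate.Delete()) // true false

	// A lone delete is a plain delete.
	destroy := tfjson.Actions{tfjson.ActionDelete}
	fmt.Println(destroy.Replace(), destroy.Delete()) // false true
}

This is also why the test below expects the [delete, create] change to map to terraformlib.ActionTypeRecreate while the bare create is filtered out.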

bundle/phases/deploy_test.go

Lines changed: 67 additions & 0 deletions
@@ -0,0 +1,67 @@
+package phases
+
+import (
+	"testing"
+
+	terraformlib "github.com/databricks/cli/libs/terraform"
+	tfjson "github.com/hashicorp/terraform-json"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestParseTerraformActions(t *testing.T) {
+	changes := []*tfjson.ResourceChange{
+		{
+			Type: "databricks_pipeline",
+			Change: &tfjson.Change{
+				Actions: tfjson.Actions{tfjson.ActionCreate},
+			},
+			Name: "create pipeline",
+		},
+		{
+			Type: "databricks_pipeline",
+			Change: &tfjson.Change{
+				Actions: tfjson.Actions{tfjson.ActionDelete},
+			},
+			Name: "delete pipeline",
+		},
+		{
+			Type: "databricks_pipeline",
+			Change: &tfjson.Change{
+				Actions: tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate},
+			},
+			Name: "recreate pipeline",
+		},
+		{
+			Type: "databricks_whatever",
+			Change: &tfjson.Change{
+				Actions: tfjson.Actions{tfjson.ActionDelete, tfjson.ActionCreate},
+			},
+			Name: "recreate whatever",
+		},
+	}
+
+	res := parseTerraformActions(changes, func(typ string, actions tfjson.Actions) bool {
+		if typ != "databricks_pipeline" {
+			return false
+		}
+
+		if actions.Delete() || actions.Replace() {
+			return true
+		}
+
+		return false
+	})
+
+	assert.Equal(t, []terraformlib.Action{
+		{
+			Action:       terraformlib.ActionTypeDelete,
+			ResourceType: "databricks_pipeline",
+			ResourceName: "delete pipeline",
+		},
+		{
+			Action:       terraformlib.ActionTypeRecreate,
+			ResourceType: "databricks_pipeline",
+			ResourceName: "recreate pipeline",
+		},
+	}, res)
+}

bundle/tests/complex_variables_test.go

Lines changed: 19 additions & 0 deletions
@@ -68,3 +68,22 @@ func TestComplexVariablesOverride(t *testing.T) {
 	require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.random"])
 	require.Equal(t, "", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.PolicyId)
 }
+
+func TestComplexVariablesOverrideWithMultipleFiles(t *testing.T) {
+	b, diags := loadTargetWithDiags("variables/complex_multiple_files", "dev")
+	require.Empty(t, diags)
+
+	diags = bundle.Apply(context.Background(), b, bundle.Seq(
+		mutator.SetVariables(),
+		mutator.ResolveVariableReferencesInComplexVariables(),
+		mutator.ResolveVariableReferences(
+			"variables",
+		),
+	))
+	require.NoError(t, diags.Error())
+
+	require.Equal(t, "14.2.x-scala2.11", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkVersion)
+	require.Equal(t, "Standard_DS3_v2", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NodeTypeId)
+	require.Equal(t, 4, b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.NumWorkers)
+	require.Equal(t, "false", b.Config.Resources.Jobs["my_job"].JobClusters[0].NewCluster.SparkConf["spark.speculation"])
+}
Lines changed: 17 additions & 0 deletions
@@ -0,0 +1,17 @@
+bundle:
+  name: complex-variables-multiple-files
+
+resources:
+  jobs:
+    my_job:
+      job_clusters:
+        - job_cluster_key: key
+          new_cluster: ${var.cluster}
+
+variables:
+  cluster:
+    type: complex
+    description: "A cluster definition"
+
+include:
+  - ./variables/*.yml
Lines changed: 11 additions & 0 deletions
@@ -0,0 +1,11 @@
+targets:
+  default:
+  dev:
+    variables:
+      cluster:
+        spark_version: "14.2.x-scala2.11"
+        node_type_id: "Standard_DS3_v2"
+        num_workers: 4
+        spark_conf:
+          spark.speculation: false
+          spark.databricks.delta.retentionDurationCheck.enabled: false
Lines changed: 8 additions & 0 deletions
@@ -0,0 +1,8 @@
+{
+    "properties": {
+        "unique_id": {
+            "type": "string",
+            "description": "Unique ID for the schema and pipeline names"
+        }
+    }
+}
Lines changed: 25 additions & 0 deletions
@@ -0,0 +1,25 @@
+bundle:
+  name: "bundle-playground"
+
+variables:
+  catalog:
+    description: The catalog the DLT pipeline should use.
+    default: main
+
+
+resources:
+  pipelines:
+    foo:
+      name: test-pipeline-{{.unique_id}}
+      libraries:
+        - notebook:
+            path: ./nb.sql
+      development: true
+      catalog: ${var.catalog}
+
+include:
+  - "*.yml"
+
+targets:
+  development:
+    default: true
Lines changed: 2 additions & 0 deletions
@@ -0,0 +1,2 @@
+-- Databricks notebook source
+select 1
