Skip to content

Commit 52578a5

Browse files
authored
direct: Reuse graph between plan & apply; sort plan (#3455)
## Changes

- The same graph is reused between the plan and apply stages. The practical consequence is that dependencies implied via ${resources...} references are always respected during deploy, even if the reference can be evaluated right away.
- The order of the plan is changed to match terraform. Sorting is no longer necessary in tests.
- A new ResourceNode type replaces nodeKey and uses Group/Key rather than Group/Name, which is less ambiguous.

## Why

Matching terraform behavior. Users could be using ${resources...} references to enforce dependencies.

## Tests

New regression acceptance test.
1 parent 831096a commit 52578a5

File tree

21 files changed

+276
-176
lines changed

21 files changed

+276
-176
lines changed

acceptance/bundle/resource_deps/create_error/script

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,7 @@ print_sorted_requests() {
99
}
1010

1111
echo "*" > .gitignore
12-
# In direct, plan is sorted by deployment order. In TF it comes back sorted alphabetically.
13-
# I think deployment order is better, sorting here to remove the mismatch
14-
trace $CLI bundle plan 2>&1 | sort
12+
trace $CLI bundle plan 2>&1
1513
musterr $CLI bundle deploy &> out.deploy.$DATABRICKS_CLI_DEPLOYMENT.txt
1614
trace print_sorted_requests
1715

@@ -21,7 +19,7 @@ ind_id=`read_id.py jobs independent`
2119
echo "$ind_id:IND_ID" >> ACC_REPLS
2220

2321
title "Plan should still contain foo and bar"
24-
trace $CLI bundle plan 2>&1 | sort
22+
trace $CLI bundle plan
2523

2624
rm out.requests.txt
2725
musterr $CLI bundle deploy &> out.deploy2.$DATABRICKS_CLI_DEPLOYMENT.txt
Lines changed: 18 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,18 @@
1+
2+
resources:
3+
jobs:
4+
a:
5+
name: aa
6+
description: aa_desc
7+
b:
8+
name: bb
9+
description: prefix ${resources.jobs.a.id}
10+
c:
11+
name: cc
12+
description: prefix ${resources.jobs.b.id}
13+
d:
14+
name: dd
15+
description: prefix ${resources.jobs.c.id}
16+
e:
17+
name: ee
18+
description: prefix ${resources.jobs.d.id}
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
Local = true
2+
Cloud = false
3+
4+
[EnvMatrix]
5+
DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
2+
>>> [CLI] bundle plan
3+
create jobs.a
4+
create jobs.b
5+
create jobs.c
6+
create jobs.d
7+
create jobs.e
8+
9+
>>> print_requests
10+
11+
>>> [CLI] bundle deploy
12+
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
13+
Deploying resources...
14+
Updating deployment state...
15+
Deployment complete!
16+
17+
>>> print_requests_short
18+
"aa"
19+
"bb"
20+
"cc"
21+
"dd"
22+
"ee"
23+
24+
>>> update_file.py databricks.yml aa_desc aa_new_desc
25+
26+
>>> update_file.py databricks.yml prefix new_prefix
27+
28+
>>> [CLI] bundle plan
29+
update jobs.a
30+
update jobs.b
31+
update jobs.c
32+
update jobs.d
33+
update jobs.e
34+
35+
>>> print_requests
36+
37+
>>> [CLI] bundle deploy
38+
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
39+
Deploying resources...
40+
Updating deployment state...
41+
Deployment complete!
42+
43+
>>> print_requests_short
44+
"aa"
45+
"bb"
46+
"cc"
47+
"dd"
48+
"ee"
Lines changed: 29 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,29 @@
1+
print_all_requests() {
2+
jq --sort-keys 'select(.path | contains("/jobs"))' < out.requests.txt
3+
rm out.requests.txt
4+
}
5+
6+
print_requests() {
7+
jq --sort-keys 'select(.method != "GET" and (.path | contains("/jobs")))' < out.requests.txt
8+
rm out.requests.txt
9+
}
10+
11+
print_requests_short() {
12+
jq --sort-keys 'select(.method != "GET" and (.path | contains("/jobs"))) | (.body.name // .body.new_settings.name // empty)' < out.requests.txt
13+
rm out.requests.txt
14+
}
15+
16+
trace $CLI bundle plan
17+
trace print_requests
18+
19+
trace $CLI bundle deploy
20+
trace print_requests_short
21+
22+
trace update_file.py databricks.yml aa_desc aa_new_desc
23+
trace update_file.py databricks.yml prefix new_prefix
24+
25+
trace $CLI bundle plan
26+
trace print_requests
27+
28+
trace $CLI bundle deploy
29+
trace print_requests_short

acceptance/bundle/resource_deps/jobs_update/script

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,7 @@ print_sorted_requests() {
99
}
1010

1111
echo "*" > .gitignore
12-
# In direct, plan is sorted by deployment order. In TF it comes back sorted alphabetically.
13-
# I think deployment order is better, sorting here to remove the mismatch
14-
trace $CLI bundle plan 2>&1 | sort
12+
trace $CLI bundle plan
1513
trace $CLI bundle deploy
1614
trace print_requests
1715

acceptance/bundle/resource_deps/pipelines_recreate/output.txt

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -49,8 +49,8 @@ Deployment complete!
4949
>>> update_file.py databricks.yml mycatalog mynewcatalog
5050

5151
>>> [CLI] bundle plan
52-
recreate pipelines.foo
5352
update jobs.bar
53+
recreate pipelines.foo
5454

5555
>>> [CLI] bundle deploy --auto-approve
5656
Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...

acceptance/bundle/resource_deps/pipelines_recreate/script

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,9 +9,7 @@ print_sorted_requests() {
99
}
1010

1111
echo "*" > .gitignore
12-
# In direct, plan is sorted by deployment order. In TF it comes back sorted alphabetically.
13-
# I think deployment order is better, sorting here to remove the mismatch
14-
trace $CLI bundle plan 2>&1 | sort
12+
trace $CLI bundle plan
1513
trace $CLI bundle deploy
1614
trace print_requests
1715

@@ -25,7 +23,7 @@ trace $CLI bundle plan # empty
2523

2624
title "Update catalog, triggering recreate for pipeline; this means updating downstream deps"
2725
trace update_file.py databricks.yml mycatalog mynewcatalog
28-
trace $CLI bundle plan 2>&1 | sort
26+
trace $CLI bundle plan
2927
trace $CLI bundle deploy --auto-approve
3028
trace print_requests
3129

bundle/bundle.go

Lines changed: 16 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,6 +22,7 @@ import (
2222
"github.com/databricks/cli/bundle/metadata"
2323
"github.com/databricks/cli/bundle/terranova/tnstate"
2424
"github.com/databricks/cli/libs/auth"
25+
"github.com/databricks/cli/libs/dagrun"
2526
"github.com/databricks/cli/libs/dyn"
2627
"github.com/databricks/cli/libs/fileset"
2728
"github.com/databricks/cli/libs/locker"
@@ -127,7 +128,21 @@ type Bundle struct {
127128
// Stores the locker responsible for acquiring/releasing a deployment lock.
128129
Locker *locker.Locker
129130

130-
Plan deployplan.Plan
131+
// TerraformPlanPath is the path to the plan from the terraform CLI
132+
TerraformPlanPath string
133+
134+
// If true, the plan is empty and applying it will not do anything
135+
TerraformPlanIsEmpty bool
136+
137+
// (direct only) graph of dependencies between resources
138+
Graph *dagrun.Graph[deployplan.ResourceNode]
139+
140+
// (direct only) planned action for each resource
141+
PlannedActions map[deployplan.ResourceNode]deployplan.ActionType
142+
143+
// (direct only) flag that tells if a given node has references to it
144+
// TODO: move this inside Graph
145+
IsReferenced map[deployplan.ResourceNode]bool
131146

132147
// if true, we skip approval checks for deploy, destroy resources and delete
133148
// files

bundle/deploy/terraform/apply.go

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ func (w *apply) Name() string {
1818

1919
func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
2020
// return early if plan is empty
21-
if b.Plan.TerraformIsEmpty {
21+
if b.TerraformPlanIsEmpty {
2222
log.Debugf(ctx, "No changes in plan. Skipping terraform apply.")
2323
return nil
2424
}
@@ -28,12 +28,12 @@ func (w *apply) Apply(ctx context.Context, b *bundle.Bundle) diag.Diagnostics {
2828
return diag.Errorf("terraform not initialized")
2929
}
3030

31-
if b.Plan.TerraformPlanPath == "" {
31+
if b.TerraformPlanPath == "" {
3232
return diag.Errorf("no plan found")
3333
}
3434

3535
// Apply terraform according to the computed plan
36-
err := tf.Apply(ctx, tfexec.DirOrPlan(b.Plan.TerraformPlanPath))
36+
err := tf.Apply(ctx, tfexec.DirOrPlan(b.TerraformPlanPath))
3737
if err != nil {
3838
diags := permissions.TryExtendTerraformPermissionError(ctx, b, err)
3939
if diags != nil {

0 commit comments

Comments
 (0)