
Commit 968e382

Resolve $resources refs before deployment if set by user (#3423)
## Changes

Resolve ${resources} references internally, before deployment. This is v2 of #3370, which was reverted in #3421 because it interacted poorly with other resource transformations performed by DABs. By moving this resolution later, we ensure that all transformations are visible when we do reference replacement.

## Why

Currently we resolve these references via Terraform, but that will not work with direct deployment. To minimize the differences between the TF and direct backends and to surface possible issues with this approach, we do this for both backends.

---------

Co-authored-by: shreyas-goenka <[email protected]>
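For illustration, here is a minimal standalone sketch of the behaviour described above: interpolating ${resources.*} references using only values the user explicitly provided, and leaving unknown references (for example output-only fields) untouched so they can be resolved at deploy time. This is not the CLI's actual dyn/dynvar machinery; the regexp-and-map approach and all names here are assumptions made for the example.

```go
package main

import (
	"fmt"
	"regexp"
)

// refPattern matches ${resources.<path>} placeholders, e.g. ${resources.schemas.bar.name}.
// Simplified stand-in for the CLI's real reference parsing.
var refPattern = regexp.MustCompile(`\$\{(resources(?:\.[A-Za-z0-9_]+)+)\}`)

// resolveResourceRefs substitutes placeholders for which the user explicitly
// provided a value; anything else is kept verbatim for later resolution.
func resolveResourceRefs(s string, provided map[string]string) string {
	return refPattern.ReplaceAllStringFunc(s, func(m string) string {
		path := refPattern.FindStringSubmatch(m)[1]
		if v, ok := provided[path]; ok {
			return v
		}
		return m // leave the literal reference in place
	})
}

func main() {
	provided := map[string]string{
		"resources.schemas.bar.catalog_name": "mycatalog",
		"resources.schemas.bar.name":         "dev_user_myschema",
	}
	name := "pipeline for ${resources.schemas.bar.catalog_name}.${resources.schemas.bar.name}.${resources.schemas.bar.id}"
	fmt.Println(resolveResourceRefs(name, provided))
	// Prints: pipeline for mycatalog.dev_user_myschema.${resources.schemas.bar.id}
}
```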

8 files changed: +101 -7 lines changed
Lines changed: 18 additions & 0 deletions
@@ -0,0 +1,18 @@
+resources:
+  schemas:
+    bar:
+      catalog_name: mycatalog
+      name: myschema
+  volumes:
+    foo:
+      catalog_name: ${resources.schemas.bar.catalog_name}
+      schema_name: ${resources.schemas.bar.name}
+      name: myname
+  pipelines:
+    mypipeline:
+      # When creating the pipeline we should see dev prefix in schema_name here
+      name: pipeline for ${resources.volumes.foo.catalog_name}.${resources.volumes.foo.schema_name}.${resources.volumes.foo.name}
+
+targets:
+  dev:
+    mode: development
Lines changed: 5 additions & 0 deletions
@@ -0,0 +1,5 @@
+Local = true
+Cloud = false
+
+[EnvMatrix]
+DATABRICKS_CLI_DEPLOYMENT = ["terraform", "direct-exp"]
Lines changed: 43 additions & 0 deletions
@@ -0,0 +1,43 @@
+
+>>> [CLI] bundle validate -t dev -o json
+{
+  "pipelines": {
+    "mypipeline": {
+      "channel": "CURRENT",
+      "deployment": {
+        "kind": "BUNDLE",
+        "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/dev/state/metadata.json"
+      },
+      "development": true,
+      "edition": "ADVANCED",
+      "name": "[dev [USERNAME]] pipeline for ${resources.volumes.foo.catalog_name}.${resources.volumes.foo.schema_name}.${resources.volumes.foo.name}",
+      "permissions": [],
+      "tags": {
+        "dev": "[USERNAME]"
+      }
+    }
+  },
+  "schemas": {
+    "bar": {
+      "catalog_name": "mycatalog",
+      "name": "dev_[USERNAME]_myschema"
+    }
+  },
+  "volumes": {
+    "foo": {
+      "catalog_name": "${resources.schemas.bar.catalog_name}",
+      "name": "myname",
+      "schema_name": "${resources.schemas.bar.name}",
+      "volume_type": "MANAGED"
+    }
+  }
+}
+
+>>> [CLI] bundle deploy -t dev
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/dev/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> jq -s .[] | select(.path=="/api/2.0/pipelines") | .body.name out.requests.txt
+"[dev [USERNAME]] pipeline for mycatalog.dev_[USERNAME]_myschema.myname"
Lines changed: 4 additions & 0 deletions
@@ -0,0 +1,4 @@
+trace $CLI bundle validate -t dev -o json | jq .resources
+trace $CLI bundle deploy -t dev
+trace jq -s '.[] | select(.path=="/api/2.0/pipelines") | .body.name' out.requests.txt
+rm out.requests.txt
Lines changed: 1 addition & 0 deletions
@@ -0,0 +1 @@
+RecordRequests = true

bundle/config/mutator/resolve_variable_references.go

Lines changed: 18 additions & 2 deletions
@@ -229,8 +229,24 @@ func (m *resolveVariableReferences) resolveOnce(b *bundle.Bundle, prefixes []dyn
 		// Perform resolution only if the path starts with one of the specified prefixes.
 		for _, prefix := range prefixes {
 			if path.HasPrefix(prefix) {
-				hasUpdates = true
-				return m.lookupFn(normalized, path, b)
+				isOpt := prefix[0].Key() == "resources"
+				var value dyn.Value
+				var err error
+				if isOpt {
+					// We don't want injected zero values when resolving $resources.
+					// We only want entries that are explicitly provided by users, so we're using root, not normalized, here.
+					value, err = m.lookupFn(root, path, b)
+					if !value.IsValid() {
+						// Not having a value is not an error in this case; it might be resolved at deploy time (for example, output-only fields).
+						// TODO: we could still check whether it's part of the schema or not. If it's the latter, we can reject it right away.
+						// TODO: This might be better done after we get rid of TF.
+						return dyn.InvalidValue, dynvar.ErrSkipResolution
+					}
+				} else {
+					value, err = m.lookupFn(normalized, path, b)
+				}
+				hasUpdates = hasUpdates || (err == nil && value.IsValid())
+				return value, err
 			}
 		}
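
To make the root-versus-normalized distinction in this hunk more concrete, here is a rough sketch that uses flat string maps instead of the real dyn.Value trees; the maps, names, and error value are assumptions made for the illustration. Normalization injects zero values for unset fields, so a $resources lookup consults only the user-provided (root) config and signals "skip" when the field was never set.

```go
package main

import (
	"errors"
	"fmt"
)

// errSkipResolution stands in for dynvar.ErrSkipResolution in this sketch:
// the reference is left as-is and may still be resolved later, e.g. at deploy time.
var errSkipResolution = errors.New("skip resolution")

// lookup resolves a path against the user-provided (root) config when the
// reference is a $resources reference, so zero values injected during
// normalization are never substituted.
func lookup(root, normalized map[string]string, path string, isResourcesRef bool) (string, error) {
	if isResourcesRef {
		v, ok := root[path] // only values the user explicitly wrote
		if !ok {
			return "", errSkipResolution
		}
		return v, nil
	}
	// Other prefixes keep using the normalized config.
	return normalized[path], nil
}

func main() {
	root := map[string]string{"resources.schemas.bar.name": "myschema"}
	normalized := map[string]string{
		"resources.schemas.bar.name": "myschema",
		"resources.schemas.bar.id":   "", // zero value injected by normalization
	}

	fmt.Println(lookup(root, normalized, "resources.schemas.bar.name", true)) // myschema <nil>
	fmt.Println(lookup(root, normalized, "resources.schemas.bar.id", true))   // "" skip resolution
}
```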

bundle/config/mutator/resourcemutator/resource_mutator.go

Lines changed: 0 additions & 5 deletions
@@ -154,11 +154,6 @@ func applyNormalizeMutators(ctx context.Context, b *bundle.Bundle) {
 		// Updates (dynamic): resources.apps.*.resources (merges app resources with the same name)
 		MergeApps(),
 
-		// Reads (typed): resources.pipelines.*.{catalog,schema,target}, resources.volumes.*.{catalog_name,schema_name} (checks for schema references)
-		// Updates (typed): resources.pipelines.*.{schema,target}, resources.volumes.*.schema_name (converts implicit schema references to explicit ${resources.schemas.<schema_key>.name} syntax)
-		// Translates implicit schema references in DLT pipelines or UC Volumes to explicit syntax to capture dependencies
-		CaptureSchemaDependency(),
-
 		// Reads (dynamic): resources.dashboards.*.file_path
 		// Updates (dynamic): resources.dashboards.*.serialized_dashboard
 		// Drops (dynamic): resources.dashboards.*.file_path

bundle/phases/deploy.go

Lines changed: 12 additions & 0 deletions
@@ -9,6 +9,7 @@ import (
 	"github.com/databricks/cli/bundle/artifacts"
 	"github.com/databricks/cli/bundle/config"
 	"github.com/databricks/cli/bundle/config/mutator"
+	"github.com/databricks/cli/bundle/config/mutator/resourcemutator"
 	"github.com/databricks/cli/bundle/deploy"
 	"github.com/databricks/cli/bundle/deploy/files"
 	"github.com/databricks/cli/bundle/deploy/lock"
@@ -173,6 +174,17 @@ func deployPrepare(ctx context.Context, b *bundle.Bundle) {
 		// TransformWheelTask depends on it and planning also depends on it.
 		libraries.Upload(),
 		trampoline.TransformWheelTask(),
+
+		mutator.ResolveVariableReferencesOnlyResources(
+			"resources",
+		),
+
+		// Reads (typed): resources.pipelines.*.{catalog,schema,target}, resources.volumes.*.{catalog_name,schema_name} (checks for schema references)
+		// Updates (typed): resources.pipelines.*.{schema,target}, resources.volumes.*.schema_name (converts implicit schema references to explicit ${resources.schemas.<schema_key>.name} syntax)
+		// Translates implicit schema references in DLT pipelines or UC Volumes to explicit syntax to capture dependencies
+		// Needs to be run after ${resources} resolution since otherwise that undoes the change here.
+		// TODO: once we have depends_on support we should leverage that here and move this back to the initialize phase.
+		resourcemutator.CaptureSchemaDependency(),
 	)
 }
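
The ordering comment above ("needs to be run after ${resources} resolution") can be illustrated with a toy sequence of config transformations. The map-based config and function names below are assumptions, not the bundle's actual mutator API: if the implicit-schema capture ran before resolution, resolution would immediately replace the explicit reference it had just introduced, losing the dependency before the backend ever sees it.

```go
package main

import "fmt"

// A step transforms a simplified string-keyed config in place.
type step func(cfg map[string]string)

// resolveRefs replaces the one reference this sketch knows about with its literal value.
func resolveRefs(cfg map[string]string) {
	if cfg["volumes.foo.schema_name"] == "${resources.schemas.bar.name}" {
		cfg["volumes.foo.schema_name"] = cfg["schemas.bar.name"]
	}
}

// captureSchemaDependency rewrites an implicit schema reference into explicit ${...} syntax.
func captureSchemaDependency(cfg map[string]string) {
	if cfg["volumes.foo.schema_name"] == cfg["schemas.bar.name"] {
		cfg["volumes.foo.schema_name"] = "${resources.schemas.bar.name}"
	}
}

func run(steps []step) map[string]string {
	cfg := map[string]string{
		"schemas.bar.name":        "myschema",
		"volumes.foo.schema_name": "myschema", // implicit reference to the schema above
	}
	for _, s := range steps {
		s(cfg)
	}
	return cfg
}

func main() {
	// Capture before resolution: the explicit reference is resolved away again,
	// so the dependency is lost before deployment.
	fmt.Println(run([]step{captureSchemaDependency, resolveRefs}))
	// Capture after resolution: the explicit reference survives for the backend to use.
	fmt.Println(run([]step{resolveRefs, captureSchemaDependency}))
}
```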
