35 changes: 35 additions & 0 deletions acceptance/bin/print_state.py
@@ -0,0 +1,35 @@
#!/usr/bin/env python3
"""
Print resources state from default target.

Note: this intentionally has no logic for guessing which state file is the right one (e.g. via DATABRICKS_BUNDLE_ENGINE);
the goal is to record all states that are available.
"""

import os
import argparse


def write(filename):
data = open(filename).read()
print(data, end="")
if not data.endswith("\n"):
print()


def main():
parser = argparse.ArgumentParser()
parser.add_argument("-t", "--target", default="default")
args = parser.parse_args()

filename = f".databricks/bundle/{args.target}/terraform/terraform.tfstate"
if os.path.exists(filename):
write(filename)

filename = f".databricks/bundle/{args.target}/resources.json"
if os.path.exists(filename):
write(filename)


if __name__ == "__main__":
main()
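For context, a hypothetical sketch (not part of this change) of how a test driver could capture the recorded state around the new migrate command using the helper above. The target name, working directory, and CLI binary name are assumptions for illustration.

```python
import subprocess


def capture_state(target: str) -> str:
    # print_state.py dumps terraform.tfstate and/or resources.json for the target.
    result = subprocess.run(
        ["python3", "acceptance/bin/print_state.py", "--target", target],
        capture_output=True, text=True, check=True,
    )
    return result.stdout


before = capture_state("dev")
subprocess.run(["databricks", "bundle", "deployment", "migrate", "-t", "dev"], check=True)
after = capture_state("dev")
print("state changed after migrate:", before != after)
```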
1 change: 1 addition & 0 deletions acceptance/bundle/debug/output.txt
@@ -6,6 +6,7 @@ Usage:
Available Commands:
plan Show deployment plan in JSON format (experimental)
refschema Dump all relevant fields of all bundle resources
states Show available state files

Flags:
-h, --help help for debug

24 changes: 24 additions & 0 deletions acceptance/bundle/help/bundle-deployment-migrate/output.txt
@@ -0,0 +1,24 @@

>>> [CLI] bundle deployment migrate --help
This command converts your bundle from using Terraform for deployment to using
the Direct deployment engine. It reads resource IDs from the existing Terraform
state and creates a Direct deployment state file (resources.json) with the same
lineage and incremented serial number.

Note: the migration is performed locally only. To finalize it, run 'bundle deploy'. This will synchronize the state
file to the workspace so that subsequent deploys of this bundle use the direct deployment engine as well.

WARNING: Both the direct deployment engine and this command are experimental and not yet recommended for production targets.

Usage:
databricks bundle deployment migrate [flags]

Flags:
-h, --help help for migrate

Global Flags:
--debug enable debug logging
-o, --output type output type: text or json (default text)
-p, --profile string ~/.databrickscfg profile
-t, --target string bundle target to use (if applicable)
--var strings set values for variables defined in bundle config. Example: --var="foo=bar"
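For illustration only, here is a minimal Python sketch of the translation the help text describes: read resource IDs from the existing Terraform state and write a resources.json with the same lineage and an incremented serial. The resource-type mapping and file paths below are assumptions based on the state files used in these tests; the actual CLI implements this in Go and also records the full resource state (as out.new_state.json below shows), not just the IDs.

```python
#!/usr/bin/env python3
"""Illustrative sketch of the migrate step; not the CLI's actual implementation."""
import json

# Assumed mapping from Terraform provider resource types to bundle resource groups.
TYPE_TO_GROUP = {
    "databricks_job": "jobs",
    "databricks_pipeline": "pipelines",
    "databricks_volume": "volumes",
}


def migrate(tfstate_path: str, out_path: str) -> None:
    with open(tfstate_path) as f:
        tfstate = json.load(f)

    state = {}
    for res in tfstate.get("resources", []):
        if res.get("mode") != "managed":
            continue
        group = TYPE_TO_GROUP.get(res["type"])
        if group is None:
            continue
        for inst in res["instances"]:
            key = f"resources.{group}.{res['name']}"
            # Only the remote ID is carried over in this sketch; the real command
            # also records the resource state needed by the direct engine.
            state[key] = {"__id__": inst["attributes"]["id"]}

    new_state = {
        "lineage": tfstate["lineage"],    # same lineage as the Terraform state
        "serial": tfstate["serial"] + 1,  # incremented serial number
        "state": state,
    }
    with open(out_path, "w") as f:
        json.dump(new_state, f, indent=2)


migrate(".databricks/bundle/dev/terraform/terraform.tfstate",
        ".databricks/bundle/dev/resources.json")
```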
1 change: 1 addition & 0 deletions acceptance/bundle/help/bundle-deployment-migrate/script
@@ -0,0 +1 @@
trace $CLI bundle deployment migrate --help
1 change: 1 addition & 0 deletions acceptance/bundle/help/bundle-deployment/output.txt
@@ -21,6 +21,7 @@ Usage:

Available Commands:
bind Bind bundle-defined resources to existing resources
migrate Migrate from Terraform to Direct deployment engine
unbind Unbind bundle-defined resources from their managed remote resources

Flags:
55 changes: 55 additions & 0 deletions acceptance/bundle/migrate/basic/databricks.yml
@@ -0,0 +1,55 @@
bundle:
name: migrate-basic-test

resources:
jobs:
test_job:
name: "Test Migration Job"
tasks:
- task_key: "main"
notebook_task:
notebook_path: "./notebook.py"
# permissions don't work yet
#permissions:
# - level: CAN_VIEW
# user_name: [email protected]
volumes:
test_volume:
catalog_name: "mycat"
schema_name: "myschema"
name: "myvol"

pipelines:
test_pipeline:
name: "Test Migration Pipeline"
tags:
# ids
myjob_id: ${resources.jobs.test_job.id}
myvolume_id: ${resources.volumes.test_volume.id}

# local field, string:
myjob_name: ${resources.jobs.test_job.name}
volume_catalog_name: ${resources.volumes.test_volume.catalog_name}

# Remote fields cause permanent drift (unrelated to migration)
# remote field, int, null
myjob_timeout: ${resources.jobs.test_job.timeout_seconds}

# remote field, string:
volume_storage_location: ${resources.volumes.test_volume.storage_location}
libraries:
- notebook:
path: "./pipeline.py"
#permissions:
# - level: CAN_MANAGE
# user_name: [email protected]

targets:
dev:
default: true
prod:
resources:
schemas:
test_schema:
catalog_name: mycat
name: myschema
2 changes: 2 additions & 0 deletions acceptance/bundle/migrate/basic/notebook.py
@@ -0,0 +1,2 @@
# Databricks notebook source
print("Hello from test migration job")
66 changes: 66 additions & 0 deletions acceptance/bundle/migrate/basic/out.new_state.json
@@ -0,0 +1,66 @@
{
"lineage": "[UUID]",
"serial": 5,
"state": {
"resources.jobs.test_job": {
"__id__": "[NUMID]",
"state": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/migrate-basic-test/dev/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"max_concurrent_runs": 1,
"name": "Test Migration Job",
"queue": {
"enabled": true
},
"tasks": [
{
"notebook_task": {
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/migrate-basic-test/dev/files/notebook"
},
"task_key": "main"
}
]
}
},
"resources.pipelines.test_pipeline": {
"__id__": "[UUID]",
"state": {
"channel": "CURRENT",
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/migrate-basic-test/dev/state/metadata.json"
},
"edition": "ADVANCED",
"libraries": [
{
"notebook": {
"path": "/Workspace/Users/[USERNAME]/.bundle/migrate-basic-test/dev/files/pipeline"
}
}
],
"name": "Test Migration Pipeline",
"tags": {
"myjob_id": "[NUMID]",
"myjob_name": "Test Migration Job",
"myjob_timeout": "",
"myvolume_id": "mycat.myschema.myvol",
"volume_catalog_name": "mycat",
"volume_storage_location": "s3://deco-uc-prod-isolated-aws-us-east-1/metastore/[UUID]/volumes/[UUID]"
}
}
},
"resources.volumes.test_volume": {
"__id__": "mycat.myschema.myvol",
"state": {
"catalog_name": "mycat",
"name": "myvol",
"schema_name": "myschema",
"volume_type": "MANAGED"
}
}
}
}
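As a hedged example of how a test like this might assert that the migrated resources.json kept the Terraform lineage and bumped the serial, the following sketch reads both state files from the paths used by print_state.py (the `dev` target is assumed from the bundle config above):

```python
import json

# Paths follow the layout read by acceptance/bin/print_state.py for the dev target.
with open(".databricks/bundle/dev/terraform/terraform.tfstate") as f:
    old = json.load(f)
with open(".databricks/bundle/dev/resources.json") as f:
    new = json.load(f)

assert new["lineage"] == old["lineage"], "lineage must be preserved"
assert new["serial"] > old["serial"], "serial must be incremented"

# Every migrated resource entry carries the remote ID under __id__.
for key, entry in new["state"].items():
    assert key.startswith("resources."), key
    assert "__id__" in entry, f"missing __id__ for {key}"
```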