diff --git a/acceptance/bundle/migrate/default-python/out.plan_after_deploy.json b/acceptance/bundle/migrate/default-python/out.plan_after_deploy.json
index 60d2bf1af5..24eaa0fdf6 100644
--- a/acceptance/bundle/migrate/default-python/out.plan_after_deploy.json
+++ b/acceptance/bundle/migrate/default-python/out.plan_after_deploy.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
@@ -130,7 +129,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/migrate/default-python/out.plan_after_migration.json b/acceptance/bundle/migrate/default-python/out.plan_after_migration.json
index df22957e03..bd3bb24086 100644
--- a/acceptance/bundle/migrate/default-python/out.plan_after_migration.json
+++ b/acceptance/bundle/migrate/default-python/out.plan_after_migration.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
@@ -231,10 +230,6 @@
       "action": "skip",
       "reason": "server_side_default"
     },
-    "job_clusters[0].new_cluster.num_workers": {
-      "action": "update",
-      "old": 0
-    },
     "tasks[task_key='notebook_task'].libraries[0].whl": {
       "action": "update",
       "old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][2]-py3-none-any.whl",
diff --git a/acceptance/bundle/migrate/default-python/out.state_after_migration.json b/acceptance/bundle/migrate/default-python/out.state_after_migration.json
index 8580bd570e..5132e7ac58 100644
--- a/acceptance/bundle/migrate/default-python/out.state_after_migration.json
+++ b/acceptance/bundle/migrate/default-python/out.state_after_migration.json
@@ -21,7 +21,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/migrate/default-python/output.txt b/acceptance/bundle/migrate/default-python/output.txt
index ac8cb0eac0..2f9e474c20 100644
--- a/acceptance/bundle/migrate/default-python/output.txt
+++ b/acceptance/bundle/migrate/default-python/output.txt
@@ -82,10 +82,6 @@ Building python_artifact...
       "action": "skip",
       "reason": "server_side_default"
     },
-    "job_clusters[0].new_cluster.num_workers": {
-      "action": "update",
-      "old": 0
-    },
     "tasks[task_key='notebook_task'].libraries[0].whl": {
       "action": "update",
       "old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl",
@@ -160,10 +156,6 @@ Building python_artifact...
       "action": "skip",
       "reason": "server_side_default"
     },
-    "job_clusters[0].new_cluster.num_workers": {
-      "action": "update",
-      "old": 0
-    },
     "tasks[task_key='notebook_task'].libraries[0].whl": {
       "action": "update",
       "old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl",
diff --git a/acceptance/bundle/resource_deps/jobs_update_remote/job_update.json b/acceptance/bundle/resource_deps/jobs_update_remote/job_update.json
index c5bc39612a..de0326341b 100644
--- a/acceptance/bundle/resource_deps/jobs_update_remote/job_update.json
+++ b/acceptance/bundle/resource_deps/jobs_update_remote/job_update.json
@@ -11,7 +11,6 @@
     {
       "job_cluster_key": "key",
       "new_cluster": {
-        "num_workers": 0,
         "spark_version": "13.3.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/resource_deps/jobs_update_remote/out.plan_update.direct.json b/acceptance/bundle/resource_deps/jobs_update_remote/out.plan_update.direct.json
index 41dd61c254..9607840f76 100644
--- a/acceptance/bundle/resource_deps/jobs_update_remote/out.plan_update.direct.json
+++ b/acceptance/bundle/resource_deps/jobs_update_remote/out.plan_update.direct.json
@@ -98,7 +98,6 @@
     {
       "job_cluster_key": "key",
       "new_cluster": {
-        "num_workers": 0,
        "spark_version": "13.3.x-scala2.12"
      }
    }
@@ -125,6 +124,10 @@
     "action": "skip",
     "reason": "server_side_default"
   },
+  "job_clusters[0].new_cluster.num_workers": {
+    "action": "update",
+    "old": 0
+  },
   "timeout_seconds": {
     "action": "skip",
     "reason": "server_side_default"
diff --git a/acceptance/bundle/resource_deps/jobs_update_remote/output.txt b/acceptance/bundle/resource_deps/jobs_update_remote/output.txt
index 0d8f9b0c71..1ec1c91943 100644
--- a/acceptance/bundle/resource_deps/jobs_update_remote/output.txt
+++ b/acceptance/bundle/resource_deps/jobs_update_remote/output.txt
@@ -107,7 +107,6 @@ Destroy complete!
     {
       "job_cluster_key": "key",
       "new_cluster": {
-        "num_workers": 0,
         "spark_version": "13.3.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/resources/jobs/create-error/output.txt b/acceptance/bundle/resources/jobs/create-error/output.txt
index 398e6e056b..0fcd944efd 100644
--- a/acceptance/bundle/resources/jobs/create-error/output.txt
+++ b/acceptance/bundle/resources/jobs/create-error/output.txt
@@ -1,9 +1,5 @@
 
 >>> musterr [CLI] bundle deploy --force-lock
-Warning: required field "new_cluster" is not set
-  at resources.jobs.foo.job_clusters[0]
-  in databricks.yml:7:11
-
 Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
 Deploying resources...
 Error: cannot create resources.jobs.foo: Shared job cluster feature is only supported in multi-task jobs. (400 INVALID_PARAMETER_VALUE)
diff --git a/acceptance/bundle/resources/jobs/num_workers/databricks.yml.tmpl b/acceptance/bundle/resources/jobs/num_workers/databricks.yml.tmpl
new file mode 100644
index 0000000000..8526b799f7
--- /dev/null
+++ b/acceptance/bundle/resources/jobs/num_workers/databricks.yml.tmpl
@@ -0,0 +1,71 @@
+bundle:
+  name: test-bundle
+
+resources:
+  jobs:
+    sample_job:
+      name: sample_job
+
+      trigger:
+        # Run this job every day, exactly one day from the last run; see https://docs.databricks.com/api/workspace/jobs/create#trigger
+        periodic:
+          interval: 1
+          unit: DAYS
+
+      tasks:
+        - task_key: notebook_task
+          notebook_task:
+            notebook_path: sample_notebook.py
+            source: WORKSPACE # Without this, there is a different request between direct and terraform
+
+      job_clusters:
+        - job_cluster_key: job_cluster_autoscale
+          new_cluster:
+            spark_version: 16.4.x-scala2.12
+            node_type_id: $NODE_TYPE_ID
+            data_security_mode: SINGLE_USER
+            autoscale:
+              min_workers: 1
+              max_workers: 4
+
+        # This config results in a different request between terraform and direct:
+        # Terraform removes "num_workers: 0" and direct sends it as is.
+        # This is an acceptable difference; users will get an appropriate error message from the backend and can correct their config.
+        #
+        #- job_cluster_key: job_cluster_autoscale_num_workers0
+        #  new_cluster:
+        #    spark_version: 16.4.x-scala2.13
+        #    node_type_id: $NODE_TYPE_ID
+        #    data_security_mode: SINGLE_USER
+        #    autoscale:
+        #      min_workers: 1
+        #      max_workers: 4
+        #    num_workers: 0
+
+        - job_cluster_key: job_cluster_autoscale_num_workers1
+          new_cluster:
+            spark_version: 16.4.x-scala2.14
+            node_type_id: $NODE_TYPE_ID
+            data_security_mode: SINGLE_USER
+            autoscale:
+              min_workers: 1
+              max_workers: 4
+            num_workers: 1
+
+        - job_cluster_key: job_cluster_num_workers1
+          new_cluster:
+            spark_version: 16.4.x-scala2.15
+            node_type_id: $NODE_TYPE_ID
+            data_security_mode: SINGLE_USER
+            num_workers: 1
+
+        - job_cluster_key: job_cluster_num_workers0
+          new_cluster:
+            spark_version: 16.4.x-scala2.16
+            node_type_id: $NODE_TYPE_ID
+            data_security_mode: SINGLE_USER
+            num_workers: 0
+
+        - job_cluster_key: job_cluster_default
+          new_cluster:
+            spark_version: 16.4.x-scala2.17
diff --git a/acceptance/bundle/resources/jobs/num_workers/out.test.toml b/acceptance/bundle/resources/jobs/num_workers/out.test.toml
new file mode 100644
index 0000000000..d560f1de04
--- /dev/null
+++ b/acceptance/bundle/resources/jobs/num_workers/out.test.toml
@@ -0,0 +1,5 @@
+Local = true
+Cloud = false
+
+[EnvMatrix]
+  DATABRICKS_BUNDLE_ENGINE = ["terraform", "direct"]
diff --git a/acceptance/bundle/resources/jobs/num_workers/output.txt b/acceptance/bundle/resources/jobs/num_workers/output.txt
new file mode 100644
index 0000000000..16e4d600cd
--- /dev/null
+++ b/acceptance/bundle/resources/jobs/num_workers/output.txt
@@ -0,0 +1,128 @@
+
+>>> [CLI] bundle deploy
+Warning: Single node cluster is not correctly configured
+  at resources.jobs.sample_job.job_clusters[3].new_cluster
+  in databricks.yml:64:13
+
+num_workers should be 0 only for single-node clusters. To create a
+valid single node cluster please ensure that the following properties
+are correctly set in the cluster specification:
+
+  spark_conf:
+    spark.databricks.cluster.profile: singleNode
+    spark.master: local[*]
+
+  custom_tags:
+    ResourceClass: SingleNode
+
+
+Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
+Deploying resources...
+Updating deployment state...
+Deployment complete!
+
+>>> print_requests.py //jobs
+{
+  "method": "POST",
+  "path": "/api/2.2/jobs/create",
+  "body": {
+    "deployment": {
+      "kind": "BUNDLE",
+      "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json"
+    },
+    "edit_mode": "UI_LOCKED",
+    "format": "MULTI_TASK",
+    "job_clusters": [
+      {
+        "job_cluster_key": "job_cluster_autoscale",
+        "new_cluster": {
+          "autoscale": {
+            "max_workers": 4,
+            "min_workers": 1
+          },
+          "data_security_mode": "SINGLE_USER",
+          "node_type_id": "[NODE_TYPE_ID]",
+          "spark_version": "16.4.x-scala2.12"
+        }
+      },
+      {
+        "job_cluster_key": "job_cluster_autoscale_num_workers1",
+        "new_cluster": {
+          "autoscale": {
+            "max_workers": 4,
+            "min_workers": 1
+          },
+          "data_security_mode": "SINGLE_USER",
+          "node_type_id": "[NODE_TYPE_ID]",
+          "num_workers": 1,
+          "spark_version": "16.4.x-scala2.14"
+        }
+      },
+      {
+        "job_cluster_key": "job_cluster_num_workers1",
+        "new_cluster": {
+          "data_security_mode": "SINGLE_USER",
+          "node_type_id": "[NODE_TYPE_ID]",
+          "num_workers": 1,
+          "spark_version": "16.4.x-scala2.15"
+        }
+      },
+      {
+        "job_cluster_key": "job_cluster_num_workers0",
+        "new_cluster": {
+          "data_security_mode": "SINGLE_USER",
+          "node_type_id": "[NODE_TYPE_ID]",
+          "num_workers": 0,
+          "spark_version": "16.4.x-scala2.16"
+        }
+      },
+      {
+        "job_cluster_key": "job_cluster_default",
+        "new_cluster": {
+          "num_workers": 0,
+          "spark_version": "16.4.x-scala2.17"
+        }
+      }
+    ],
+    "max_concurrent_runs": 1,
+    "name": "sample_job",
+    "queue": {
+      "enabled": true
+    },
+    "tasks": [
+      {
+        "notebook_task": {
+          "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/sample_notebook",
+          "source": "WORKSPACE"
+        },
+        "task_key": "notebook_task"
+      }
+    ],
+    "trigger": {
+      "pause_status": "UNPAUSED",
+      "periodic": {
+        "interval": 1,
+        "unit": "DAYS"
+      }
+    }
+  }
+}
+
+>>> [CLI] bundle plan
+Warning: Single node cluster is not correctly configured
+  at resources.jobs.sample_job.job_clusters[3].new_cluster
+  in databricks.yml:64:13
+
+num_workers should be 0 only for single-node clusters. To create a
+valid single node cluster please ensure that the following properties
+are correctly set in the cluster specification:
+
+  spark_conf:
+    spark.databricks.cluster.profile: singleNode
+    spark.master: local[*]
+
+  custom_tags:
+    ResourceClass: SingleNode
+
+
+Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged
diff --git a/acceptance/bundle/resources/jobs/num_workers/sample_notebook.py b/acceptance/bundle/resources/jobs/num_workers/sample_notebook.py
new file mode 100644
index 0000000000..1645e04b1d
--- /dev/null
+++ b/acceptance/bundle/resources/jobs/num_workers/sample_notebook.py
@@ -0,0 +1 @@
+# Databricks notebook source
diff --git a/acceptance/bundle/resources/jobs/num_workers/script b/acceptance/bundle/resources/jobs/num_workers/script
new file mode 100644
index 0000000000..8e430e4306
--- /dev/null
+++ b/acceptance/bundle/resources/jobs/num_workers/script
@@ -0,0 +1,6 @@
+envsubst < databricks.yml.tmpl > databricks.yml
+trace $CLI bundle deploy
+trace print_requests.py //jobs
+
+trace $CLI bundle plan
+rm out.requests.txt
diff --git a/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_dev.direct.json b/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_dev.direct.json
index 60d2bf1af5..24eaa0fdf6 100644
--- a/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_dev.direct.json
+++ b/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_dev.direct.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
@@ -130,7 +129,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_prod.direct.json b/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_prod.direct.json
index d98c983571..78693949b7 100644
--- a/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_prod.direct.json
+++ b/acceptance/bundle/templates/default-python/classic/out.plan_after_deploy_prod.direct.json
@@ -31,7 +31,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/classic/out.plan_dev.direct.json b/acceptance/bundle/templates/default-python/classic/out.plan_dev.direct.json
index 17334f3a97..ad4351ea5d 100644
--- a/acceptance/bundle/templates/default-python/classic/out.plan_dev.direct.json
+++ b/acceptance/bundle/templates/default-python/classic/out.plan_dev.direct.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/classic/out.plan_prod.direct.json b/acceptance/bundle/templates/default-python/classic/out.plan_prod.direct.json
index 834df48f4a..c10d5b405a 100644
--- a/acceptance/bundle/templates/default-python/classic/out.plan_prod.direct.json
+++ b/acceptance/bundle/templates/default-python/classic/out.plan_prod.direct.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/classic/out.requests.dev.direct.txt b/acceptance/bundle/templates/default-python/classic/out.requests.dev.direct.txt
index 436c881470..8aef094197 100644
--- a/acceptance/bundle/templates/default-python/classic/out.requests.dev.direct.txt
+++ b/acceptance/bundle/templates/default-python/classic/out.requests.dev.direct.txt
@@ -106,7 +106,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/classic/out.requests.prod.direct.txt b/acceptance/bundle/templates/default-python/classic/out.requests.prod.direct.txt
index 59b11395df..b774a9e716 100644
--- a/acceptance/bundle/templates/default-python/classic/out.requests.prod.direct.txt
+++ b/acceptance/bundle/templates/default-python/classic/out.requests.prod.direct.txt
@@ -109,7 +109,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/integration_classic/out.plan_dev.direct.json b/acceptance/bundle/templates/default-python/integration_classic/out.plan_dev.direct.json
index 9762055ef3..e0d3bd24ed 100644
--- a/acceptance/bundle/templates/default-python/integration_classic/out.plan_dev.direct.json
+++ b/acceptance/bundle/templates/default-python/integration_classic/out.plan_dev.direct.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/integration_classic/out.plan_prod.direct.json b/acceptance/bundle/templates/default-python/integration_classic/out.plan_prod.direct.json
index 947342a08c..d3806088f9 100644
--- a/acceptance/bundle/templates/default-python/integration_classic/out.plan_prod.direct.json
+++ b/acceptance/bundle/templates/default-python/integration_classic/out.plan_prod.direct.json
@@ -26,7 +26,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/integration_classic/out.validate.dev.json b/acceptance/bundle/templates/default-python/integration_classic/out.validate.dev.json
index be1f493d64..e6bac5b9c2 100644
--- a/acceptance/bundle/templates/default-python/integration_classic/out.validate.dev.json
+++ b/acceptance/bundle/templates/default-python/integration_classic/out.validate.dev.json
@@ -60,7 +60,6 @@
         },
         "data_security_mode": "SINGLE_USER",
         "node_type_id": "[NODE_TYPE_ID]",
-        "num_workers": 0,
         "spark_version": "16.4.x-scala2.12"
       }
     }
diff --git a/acceptance/bundle/templates/default-python/integration_classic/output.txt b/acceptance/bundle/templates/default-python/integration_classic/output.txt
index 51638f24bb..0b027d94b1 100644
--- a/acceptance/bundle/templates/default-python/integration_classic/output.txt
+++ b/acceptance/bundle/templates/default-python/integration_classic/output.txt
@@ -64,7 +64,7 @@
 + "id": "[NUMID]",
   "job_clusters": [
     {
-@@ -139,5 +140,6 @@
+@@ -138,5 +139,6 @@
   "unit": "DAYS"
   }
 - }
@@ -72,13 +72,13 @@ Resources:
 + "url": "[DATABRICKS_URL]/jobs/[NUMID]"
   }
   },
-@@ -156,4 +158,5 @@
+@@ -155,4 +157,5 @@
   ]
   },
 + "id": "[UUID]",
   "libraries": [
     {
-@@ -168,5 +171,6 @@
+@@ -167,5 +170,6 @@
   "tags": {
   "dev": "[USERNAME]"
 - }
@@ -170,7 +170,7 @@ Validation OK!
+ "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/state/metadata.json" }, "edit_mode": "UI_LOCKED", -@@ -66,6 +56,6 @@ +@@ -65,6 +55,6 @@ } ], - "max_concurrent_runs": 4, @@ -179,7 +179,7 @@ Validation OK! + "name": "sample_job", "parameters": [ { -@@ -74,13 +64,16 @@ +@@ -73,13 +63,16 @@ }, { - "default": "[USERNAME]", @@ -200,28 +200,28 @@ Validation OK! - "dev": "[USERNAME]" }, "tasks": [ -@@ -93,5 +86,5 @@ +@@ -92,5 +85,5 @@ ], "notebook_task": { - "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/sample_notebook" + "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/files/src/sample_notebook" }, "task_key": "notebook_task" -@@ -116,5 +109,5 @@ +@@ -115,5 +108,5 @@ "hive_metastore", "--schema", - "[USERNAME]" + "prod" ] }, -@@ -134,5 +127,5 @@ +@@ -133,5 +126,5 @@ ], "trigger": { - "pause_status": "PAUSED", + "pause_status": "UNPAUSED", "periodic": { "interval": 1, -@@ -147,11 +140,10 @@ +@@ -146,11 +139,10 @@ "deployment": { "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/state/metadata.json" @@ -235,7 +235,7 @@ Validation OK! + "--editable /Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/files" ] }, -@@ -159,14 +151,17 @@ +@@ -158,14 +150,17 @@ { "glob": { - "include": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/project_name_[UNIQUE_NAME]_etl/transformations/**" @@ -260,7 +260,7 @@ Validation OK! + "schema": "prod" } } -@@ -184,16 +179,16 @@ +@@ -183,16 +178,16 @@ }, "schema": { - "default": "[USERNAME]", @@ -370,7 +370,7 @@ Resources: + "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/state/metadata.json" }, "edit_mode": "UI_LOCKED", -@@ -67,6 +57,6 @@ +@@ -66,6 +56,6 @@ } ], - "max_concurrent_runs": 4, @@ -379,7 +379,7 @@ Resources: + "name": "sample_job", "parameters": [ { -@@ -75,13 +65,16 @@ +@@ -74,13 +64,16 @@ }, { - "default": "[USERNAME]", @@ -400,28 +400,28 @@ Resources: - "dev": "[USERNAME]" }, "tasks": [ -@@ -94,5 +87,5 @@ +@@ -93,5 +86,5 @@ ], "notebook_task": { - "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/sample_notebook" + "notebook_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/files/src/sample_notebook" }, "task_key": "notebook_task" -@@ -117,5 +110,5 @@ +@@ -116,5 +109,5 @@ "hive_metastore", "--schema", - "[USERNAME]" + "prod" ] }, -@@ -135,5 +128,5 @@ +@@ -134,5 +127,5 @@ ], "trigger": { - "pause_status": "PAUSED", + "pause_status": "UNPAUSED", "periodic": { "interval": 1, -@@ -149,11 +142,10 @@ +@@ -148,11 +141,10 @@ "deployment": { "kind": "BUNDLE", - "metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/state/metadata.json" @@ -435,7 +435,7 @@ Resources: + "--editable /Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/prod/files" ] }, -@@ -162,14 +154,17 @@ +@@ -161,14 +153,17 @@ { "glob": { - "include": "/Workspace/Users/[USERNAME]/.bundle/project_name_[UNIQUE_NAME]/dev/files/src/project_name_[UNIQUE_NAME]_etl/transformations/**" @@ -460,7 +460,7 @@ Resources: + "schema": "prod", "url": "[DATABRICKS_URL]/pipelines/[UUID]" } -@@ -188,16 +183,16 @@ +@@ -187,16 +182,16 @@ }, "schema": { - "default": "[USERNAME]", diff --git a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt index 
28b4340a72..5a978ab806 100644 --- a/acceptance/bundle/templates/experimental-jobs-as-code/output.txt +++ b/acceptance/bundle/templates/experimental-jobs-as-code/output.txt @@ -31,7 +31,6 @@ Warning: Ignoring Databricks CLI version constraint for development build. Requi }, "data_security_mode": "SINGLE_USER", "node_type_id": "[NODE_TYPE_ID]", - "num_workers": 0, "spark_version": "15.4.x-scala2.12" } } diff --git a/bundle/config/mutator/resourcemutator/cluster_fixups.go b/bundle/config/mutator/resourcemutator/cluster_fixups.go index 11a77e234f..893cd248aa 100644 --- a/bundle/config/mutator/resourcemutator/cluster_fixups.go +++ b/bundle/config/mutator/resourcemutator/cluster_fixups.go @@ -2,6 +2,7 @@ package resourcemutator import ( "context" + "slices" "github.com/databricks/cli/bundle" "github.com/databricks/cli/libs/diag" @@ -97,5 +98,19 @@ func prepareJobSettingsForUpdate(js *jobs.JobSettings) { } for ind := range js.JobClusters { ModifyRequestOnInstancePool(&js.JobClusters[ind].NewCluster) + initializeNumWorkers(&js.JobClusters[ind].NewCluster) } } + +func initializeNumWorkers(c *compute.ClusterSpec) { + if c.Autoscale != nil { + return + } + if c.NumWorkers != 0 { + return + } + if slices.Contains(c.ForceSendFields, "NumWorkers") { + return + } + c.ForceSendFields = append(c.ForceSendFields, "NumWorkers") +} diff --git a/bundle/config/mutator/resourcemutator/resource_mutator.go b/bundle/config/mutator/resourcemutator/resource_mutator.go index c62eae0b2b..d716de41be 100644 --- a/bundle/config/mutator/resourcemutator/resource_mutator.go +++ b/bundle/config/mutator/resourcemutator/resource_mutator.go @@ -40,8 +40,6 @@ func applyInitializeMutators(ctx context.Context, b *bundle.Bundle) { // ApplyPresets should have more priority than defaults below, so it should be run first ApplyPresets(), - - validate.SingleNodeCluster(), ) if logdiag.HasError(ctx) { @@ -73,8 +71,6 @@ func applyInitializeMutators(ctx context.Context, b *bundle.Bundle) { {"resources.jobs.*.task[*].for_each_task.task.dbt_task.schema", "default"}, // https://github.com/databricks/terraform-provider-databricks/blob/v1.75.0/clusters/resource_cluster.go - // This triggers SingleNodeCluster() cluster validator. It needs to be run before applying defaults. - {"resources.jobs.*.job_clusters[*].new_cluster.num_workers", 0}, {"resources.jobs.*.job_clusters[*].new_cluster.workload_type.clients.notebooks", true}, {"resources.jobs.*.job_clusters[*].new_cluster.workload_type.clients.jobs", true}, @@ -137,6 +133,9 @@ func applyNormalizeMutators(ctx context.Context, b *bundle.Bundle) { bundle.ApplySeqContext( ctx, b, + + validate.SingleNodeCluster(), + // Reads (dynamic): * (strings) (searches for variable references in string values) // Updates (dynamic): resources.* (strings) (resolves variable references to their actual values) // Resolves variable references in 'resources' using bundle, workspace, and variables prefixes
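
Reviewer note (illustration, not part of the patch): initializeNumWorkers relies on the convention used by databricks-sdk-go request structs, where a zero-valued field is serialized only if its Go field name appears in ForceSendFields. The sketch below hand-rolls that pattern on a toy type to show the wire-format effect; toyClusterSpec and its MarshalJSON are hypothetical stand-ins, not the SDK's actual compute.ClusterSpec implementation.

package main

// Toy demonstration of the ForceSendFields pattern: a zero-valued field
// is emitted in JSON only when explicitly forced, which is what
// initializeNumWorkers arranges for num_workers on non-autoscale clusters.

import (
	"encoding/json"
	"fmt"
	"slices"
)

type toyClusterSpec struct {
	NumWorkers      int
	SparkVersion    string
	ForceSendFields []string
}

// MarshalJSON hand-rolls what the SDK's marshal package does generically:
// include num_workers if it is non-zero, or if the caller forced it.
func (c toyClusterSpec) MarshalJSON() ([]byte, error) {
	m := map[string]any{"spark_version": c.SparkVersion}
	if c.NumWorkers != 0 || slices.Contains(c.ForceSendFields, "NumWorkers") {
		m["num_workers"] = c.NumWorkers
	}
	return json.Marshal(m)
}

func main() {
	implicit := toyClusterSpec{SparkVersion: "13.3.x-scala2.12"}
	forced := toyClusterSpec{
		SparkVersion:    "13.3.x-scala2.12",
		ForceSendFields: []string{"NumWorkers"},
	}
	a, _ := json.Marshal(implicit)
	b, _ := json.Marshal(forced)
	fmt.Println(string(a)) // {"spark_version":"13.3.x-scala2.12"}
	fmt.Println(string(b)) // {"num_workers":0,"spark_version":"13.3.x-scala2.12"}
}

Under this assumption, dropping the num_workers=0 bundle default while force-sending the field from prepareJobSettingsForUpdate keeps the create/update requests explicit, which is why the plan outputs above no longer report num_workers as a server-side update.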