@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -130,7 +129,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -231,10 +230,6 @@
"action": "skip",
"reason": "server_side_default"
},
"job_clusters[0].new_cluster.num_workers": {
"action": "update",
"old": 0
},
"tasks[task_key='notebook_task'].libraries[0].whl": {
"action": "update",
"old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][2]-py3-none-any.whl",
@@ -21,7 +21,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
8 changes: 0 additions & 8 deletions acceptance/bundle/migrate/default-python/output.txt
@@ -82,10 +82,6 @@ Building python_artifact...
"action": "skip",
"reason": "server_side_default"
},
"job_clusters[0].new_cluster.num_workers": {
"action": "update",
"old": 0
},
"tasks[task_key='notebook_task'].libraries[0].whl": {
"action": "update",
"old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl",
@@ -160,10 +156,6 @@ Building python_artifact...
"action": "skip",
"reason": "server_side_default"
},
"job_clusters[0].new_cluster.num_workers": {
"action": "update",
"old": 0
},
"tasks[task_key='notebook_task'].libraries[0].whl": {
"action": "update",
"old": "/Workspace/Users/[USERNAME]/.bundle/my_default_python/dev/artifacts/.internal/my_default_python-0.0.1+[UNIX_TIME_NANOS][1]-py3-none-any.whl",
@@ -11,7 +11,6 @@
{
"job_cluster_key": "key",
"new_cluster": {
"num_workers": 0,
"spark_version": "13.3.x-scala2.12"
}
}
@@ -98,7 +98,6 @@
{
"job_cluster_key": "key",
"new_cluster": {
"num_workers": 0,
"spark_version": "13.3.x-scala2.12"
}
}
@@ -125,6 +124,10 @@
"action": "skip",
"reason": "server_side_default"
},
"job_clusters[0].new_cluster.num_workers": {
"action": "update",
"old": 0
Contributor: @denik Should new be null here?

denik (Contributor Author) commented Dec 4, 2025:
No, absence is expected. It's an untyped nil internally, which is correctly rendered as omitted with omitempty.
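For illustration, a minimal Go sketch of the omitempty behavior described in this reply (the struct and field names here are assumptions for the example, not the CLI's actual internal types): a nil value is dropped from the marshaled JSON entirely rather than rendered as null, while an explicit 0 is kept.

package main

import (
	"encoding/json"
	"fmt"
)

// NewCluster is a simplified stand-in for a job cluster spec.
// NumWorkers is a pointer so that "unset" (nil) is distinguishable
// from an explicit 0; omitempty drops the key entirely when nil.
type NewCluster struct {
	SparkVersion string `json:"spark_version"`
	NumWorkers   *int   `json:"num_workers,omitempty"`
}

func main() {
	// Unset: the num_workers key is omitted from the output,
	// not rendered as "num_workers": null.
	unset, _ := json.Marshal(NewCluster{SparkVersion: "16.4.x-scala2.12"})
	fmt.Println(string(unset)) // {"spark_version":"16.4.x-scala2.12"}

	// Explicitly set to 0: the key survives marshaling.
	zero := 0
	set, _ := json.Marshal(NewCluster{SparkVersion: "16.4.x-scala2.12", NumWorkers: &zero})
	fmt.Println(string(set)) // {"spark_version":"16.4.x-scala2.12","num_workers":0}
}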

},
"timeout_seconds": {
"action": "skip",
"reason": "server_side_default"
@@ -107,7 +107,6 @@ Destroy complete!
{
"job_cluster_key": "key",
"new_cluster": {
"num_workers": 0,
"spark_version": "13.3.x-scala2.12"
}
}
4 changes: 0 additions & 4 deletions acceptance/bundle/resources/jobs/create-error/output.txt
@@ -1,9 +1,5 @@

>>> musterr [CLI] bundle deploy --force-lock
Warning: required field "new_cluster" is not set
at resources.jobs.foo.job_clusters[0]
in databricks.yml:7:11

Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
Deploying resources...
Error: cannot create resources.jobs.foo: Shared job cluster feature is only supported in multi-task jobs. (400 INVALID_PARAMETER_VALUE)
71 changes: 71 additions & 0 deletions acceptance/bundle/resources/jobs/num_workers/databricks.yml.tmpl
@@ -0,0 +1,71 @@
bundle:
name: test-bundle

resources:
jobs:
sample_job:
name: sample_job

trigger:
# Run this job every day, exactly one day from the last run; see https://docs.databricks.com/api/workspace/jobs/create#trigger
periodic:
interval: 1
unit: DAYS

tasks:
- task_key: notebook_task
notebook_task:
notebook_path: sample_notebook.py
source: WORKSPACE # Without this, there is a different request between direct and terraform

job_clusters:
- job_cluster_key: job_cluster_autoscale
new_cluster:
spark_version: 16.4.x-scala2.12
node_type_id: $NODE_TYPE_ID
data_security_mode: SINGLE_USER
autoscale:
min_workers: 1
max_workers: 4

# This config results in a different request between terraform and direct:
# Terraform removes "num_workers: 0" while direct sends it as is.
# This is an acceptable difference; users will get an appropriate error message from the backend and can correct their config.
#
#- job_cluster_key: job_cluster_autoscale_num_workers0
# new_cluster:
# spark_version: 16.4.x-scala2.13
# node_type_id: $NODE_TYPE_ID
# data_security_mode: SINGLE_USER
# autoscale:
# min_workers: 1
# max_workers: 4
# num_workers: 0

- job_cluster_key: job_cluster_autoscale_num_workers1
new_cluster:
spark_version: 16.4.x-scala2.14
node_type_id: $NODE_TYPE_ID
data_security_mode: SINGLE_USER
autoscale:
min_workers: 1
max_workers: 4
num_workers: 1

- job_cluster_key: job_cluster_num_workers1
new_cluster:
spark_version: 16.4.x-scala2.15
node_type_id: $NODE_TYPE_ID
data_security_mode: SINGLE_USER
num_workers: 1

- job_cluster_key: job_cluster_num_workers0
new_cluster:
spark_version: 16.4.x-scala2.16
node_type_id: $NODE_TYPE_ID
data_security_mode: SINGLE_USER
num_workers: 0

- job_cluster_key: job_cluster_default
new_cluster:
spark_version: 16.4.x-scala2.17
5 changes: 5 additions & 0 deletions acceptance/bundle/resources/jobs/num_workers/out.test.toml

Some generated files are not rendered by default.

128 changes: 128 additions & 0 deletions acceptance/bundle/resources/jobs/num_workers/output.txt
@@ -0,0 +1,128 @@

>>> [CLI] bundle deploy
Warning: Single node cluster is not correctly configured
at resources.jobs.sample_job.job_clusters[3].new_cluster
in databricks.yml:64:13

num_workers should be 0 only for single-node clusters. To create a
valid single node cluster please ensure that the following properties
are correctly set in the cluster specification:

spark_conf:
spark.databricks.cluster.profile: singleNode
spark.master: local[*]

custom_tags:
ResourceClass: SingleNode


Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
Deploying resources...
Updating deployment state...
Deployment complete!

>>> print_requests.py //jobs
{
"method": "POST",
"path": "/api/2.2/jobs/create",
"body": {
"deployment": {
"kind": "BUNDLE",
"metadata_file_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/state/metadata.json"
},
"edit_mode": "UI_LOCKED",
"format": "MULTI_TASK",
"job_clusters": [
{
"job_cluster_key": "job_cluster_autoscale",
"new_cluster": {
"autoscale": {
"max_workers": 4,
"min_workers": 1
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"spark_version": "16.4.x-scala2.12"
}
},
{
"job_cluster_key": "job_cluster_autoscale_num_workers1",
"new_cluster": {
"autoscale": {
"max_workers": 4,
"min_workers": 1
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 1,
"spark_version": "16.4.x-scala2.14"
}
},
{
"job_cluster_key": "job_cluster_num_workers1",
"new_cluster": {
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 1,
"spark_version": "16.4.x-scala2.15"
}
},
{
"job_cluster_key": "job_cluster_num_workers0",
"new_cluster": {
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.16"
}
},
{
"job_cluster_key": "job_cluster_default",
"new_cluster": {
"num_workers": 0,
"spark_version": "16.4.x-scala2.17"
}
}
],
"max_concurrent_runs": 1,
"name": "sample_job",
"queue": {
"enabled": true
},
"tasks": [
{
"notebook_task": {
"notebook_path": "/Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files/sample_notebook",
"source": "WORKSPACE"
},
"task_key": "notebook_task"
}
],
"trigger": {
"pause_status": "UNPAUSED",
"periodic": {
"interval": 1,
"unit": "DAYS"
}
}
}
}

>>> [CLI] bundle plan
Warning: Single node cluster is not correctly configured
at resources.jobs.sample_job.job_clusters[3].new_cluster
in databricks.yml:64:13

num_workers should be 0 only for single-node clusters. To create a
valid single node cluster please ensure that the following properties
are correctly set in the cluster specification:

spark_conf:
spark.databricks.cluster.profile: singleNode
spark.master: local[*]

custom_tags:
ResourceClass: SingleNode


Plan: 0 to add, 0 to change, 0 to delete, 1 unchanged
1 change: 1 addition & 0 deletions acceptance/bundle/resources/jobs/num_workers/sample_notebook.py
@@ -0,0 +1 @@
# Databricks notebook source
6 changes: 6 additions & 0 deletions acceptance/bundle/resources/jobs/num_workers/script
@@ -0,0 +1,6 @@
envsubst < databricks.yml.tmpl > databricks.yml
trace $CLI bundle deploy
trace print_requests.py //jobs

trace $CLI bundle plan
rm out.requests.txt
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -130,7 +129,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -31,7 +31,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -106,7 +106,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -109,7 +109,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}
@@ -26,7 +26,6 @@
},
"data_security_mode": "SINGLE_USER",
"node_type_id": "[NODE_TYPE_ID]",
"num_workers": 0,
"spark_version": "16.4.x-scala2.12"
}
}