File tree Expand file tree Collapse file tree 5 files changed +57
-0
lines changed
acceptance/bundle/resources/jobs/create-error Expand file tree Collapse file tree 5 files changed +57
-0
lines changed Original file line number Diff line number Diff line change 1+ resources:
2+ jobs:
3+ foo:
4+ name: foo
5+
6+ trigger:
7+ periodic:
8+ interval: 1
9+ unit: DAYS
10+
11+ job_clusters:
12+ - job_cluster_key: key
Original file line number Diff line number Diff line change 1+ Local = true
2+ Cloud = false
3+
4+ [EnvMatrix]
5+ DATABRICKS_CLI_DEPLOYMENT = ["direct-exp"]
Original file line number Diff line number Diff line change 1+
2+ >>> musterr [CLI] bundle deploy --force-lock
3+ Warning: required field "new_cluster" is not set
4+ at resources.jobs.foo.job_clusters[0]
5+ in databricks.yml:7:11
6+
7+ Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
8+ Deploying resources...
9+ Error: deploying jobs.foo: creating: Method=Jobs.Create *retries.Err *apierr.APIError StatusCode=400 ErrorCode="INVALID_PARAMETER_VALUE" Message="Shared job cluster feature is only supported in multi-task jobs."
10+
11+ Updating deployment state...
12+
13+ Exit code (musterr): 1
Original file line number Diff line number Diff line change 1+ trace musterr $CLI bundle deploy --force-lock
Original file line number Diff line number Diff line change 1+ RecordRequests = false
2+
3+ # Uncomment and run the test to check that the error response is realistic
4+ # deco env run -i -n aws-prod-ucws -- go test ../../../.. -run ^TestAccept$/^bundle$/^resources$/^jobs$/^create-error$ -timeout=1h
5+ # Cloud = true
6+
7+ # The error on terraform looks different, it does not pass the validation there:
8+ # Warning: required field "new_cluster" is not set
9+ # at resources.jobs.foo.job_clusters[0]
10+ # in databricks.yml:7:11
11+ #
12+ # Uploading bundle files to /Workspace/Users/[USERNAME]/.bundle/test-bundle/default/files...
13+ # Error: exit status 1
14+ #
15+ # Error: Insufficient new_cluster blocks
16+ #
17+ # on bundle.tf.json line 25, in resource.databricks_job.foo.job_cluster[0]:
18+ # 25: }
19+ #
20+ # At least 1 "new_cluster" blocks are required.
21+ EnvMatrix.DATABRICKS_CLI_DEPLOYMENT = ["direct-exp"]
22+
23+ [[Server]]
24+ Pattern = "POST /api/2.2/jobs/create"
25+ Response.StatusCode = 400
26+ Response.Body = '{"error_code": "INVALID_PARAMETER_VALUE", "message": "Shared job cluster feature is only supported in multi-task jobs."}'
You can’t perform that action at this time.
0 commit comments