Commit ffe8f69

Fixed changing the source for the SQL task from GIT to WORKSPACE (#4114)
## Changes
Fixed changing the source for the SQL task from GIT to WORKSPACE

## Why
Fixes #4000

## Tests
Added an acceptance test
1 parent 4d986cb commit ffe8f69

8 files changed: +240 -1 lines changed

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
@@ -7,6 +7,7 @@
 ### CLI
 
 ### Bundles
+* Fixed changing the source for the SQL task from GIT to WORKSPACE ([#4114](https://github.com/databricks/cli/pull/4114))
 * `bundle deployment migrate` will now run a plan before migration to check if deployment was done ([#4088](https://github.com/databricks/cli/pull/4088))
 * engine/direct: Increase "serial" in state file after every deployment ([#4115](https://github.com/databricks/cli/pull/4115))
 

acceptance/bundle/resources/jobs/task-source/databricks.yml

Lines changed: 18 additions & 0 deletions
@@ -35,6 +35,24 @@ resources:
           notebook_task:
             notebook_path: ./notebook.py
             source: WORKSPACE
+        - task_key: test_task_sql
+          sql_task:
+            warehouse_id: cafef00d
+            file:
+              path: ./sql.sql
+        - task_key: test_task_dbt
+          new_cluster:
+            spark_version: 15.4.x-scala2.12
+            num_workers: 0
+            spark_conf:
+              spark.databricks.cluster.profile: singleNode
+              spark.master: local[*]
+            custom_tags:
+              ResourceClass: SingleNode
+          dbt_task:
+            project_directory: ./dbt_project
+            commands:
+              - "dbt run"
 
     workspace_job:
       name: workspace_job
acceptance/bundle/resources/jobs/task-source/dbt_project/dbt_project.yml

Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
+name: "test_project"
+version: "1.0.0"
+profile: "default"

acceptance/bundle/resources/jobs/task-source/out.git_job.direct.txt

Lines changed: 58 additions & 0 deletions
@@ -16,6 +16,26 @@
       "enabled": true
     },
     "tasks": [
+      {
+        "dbt_task": {
+          "commands": [
+            "dbt run"
+          ],
+          "project_directory": "./dbt_project"
+        },
+        "new_cluster": {
+          "custom_tags": {
+            "ResourceClass": "SingleNode"
+          },
+          "num_workers": 0,
+          "spark_conf": {
+            "spark.databricks.cluster.profile": "singleNode",
+            "spark.master": "local[*]"
+          },
+          "spark_version": "15.4.x-scala2.12"
+        },
+        "task_key": "test_task_dbt"
+      },
       {
         "for_each_task": {
           "inputs": "[1]",
@@ -53,6 +73,15 @@
           "source": "WORKSPACE"
         },
         "task_key": "test_task_source_workspace"
+      },
+      {
+        "sql_task": {
+          "file": {
+            "path": "./sql.sql"
+          },
+          "warehouse_id": "cafef00d"
+        },
+        "task_key": "test_task_sql"
       }
     ]
   }
@@ -71,6 +100,26 @@
       "enabled": true
     },
     "tasks": [
+      {
+        "dbt_task": {
+          "commands": [
+            "dbt run"
+          ],
+          "project_directory": "/Workspace/Users/[USERNAME]/.bundle/task-source/default/files/dbt_project"
+        },
+        "new_cluster": {
+          "custom_tags": {
+            "ResourceClass": "SingleNode"
+          },
+          "num_workers": 0,
+          "spark_conf": {
+            "spark.databricks.cluster.profile": "singleNode",
+            "spark.master": "local[*]"
+          },
+          "spark_version": "15.4.x-scala2.12"
+        },
+        "task_key": "test_task_dbt"
+      },
       {
         "for_each_task": {
           "inputs": "[1]",
@@ -108,6 +157,15 @@
           "source": "WORKSPACE"
         },
         "task_key": "test_task_source_workspace"
+      },
+      {
+        "sql_task": {
+          "file": {
+            "path": "/Workspace/Users/[USERNAME]/.bundle/task-source/default/files/sql.sql"
+          },
+          "warehouse_id": "cafef00d"
+        },
+        "task_key": "test_task_sql"
       }
     ]
   }

acceptance/bundle/resources/jobs/task-source/out.git_job.terraform.txt

Lines changed: 64 additions & 0 deletions
@@ -16,6 +16,28 @@
       "enabled": true
     },
     "tasks": [
+      {
+        "dbt_task": {
+          "commands": [
+            "dbt run"
+          ],
+          "project_directory": "./dbt_project",
+          "schema": "default",
+          "source": "GIT"
+        },
+        "new_cluster": {
+          "custom_tags": {
+            "ResourceClass": "SingleNode"
+          },
+          "num_workers": 0,
+          "spark_conf": {
+            "spark.databricks.cluster.profile": "singleNode",
+            "spark.master": "local[*]"
+          },
+          "spark_version": "15.4.x-scala2.12"
+        },
+        "task_key": "test_task_dbt"
+      },
       {
         "for_each_task": {
           "inputs": "[1]",
@@ -55,6 +77,16 @@
           "source": "WORKSPACE"
         },
         "task_key": "test_task_source_workspace"
+      },
+      {
+        "sql_task": {
+          "file": {
+            "path": "./sql.sql",
+            "source": "GIT"
+          },
+          "warehouse_id": "cafef00d"
+        },
+        "task_key": "test_task_sql"
       }
     ]
   }
@@ -77,6 +109,28 @@
       "user_name": "[USERNAME]"
     },
     "tasks": [
+      {
+        "dbt_task": {
+          "commands": [
+            "dbt run"
+          ],
+          "project_directory": "/Workspace/Users/[USERNAME]/.bundle/task-source/default/files/dbt_project",
+          "schema": "default",
+          "source": "WORKSPACE"
+        },
+        "new_cluster": {
+          "custom_tags": {
+            "ResourceClass": "SingleNode"
+          },
+          "num_workers": 0,
+          "spark_conf": {
+            "spark.databricks.cluster.profile": "singleNode",
+            "spark.master": "local[*]"
+          },
+          "spark_version": "15.4.x-scala2.12"
+        },
+        "task_key": "test_task_dbt"
+      },
       {
         "for_each_task": {
           "inputs": "[1]",
@@ -116,6 +170,16 @@
           "source": "WORKSPACE"
         },
         "task_key": "test_task_source_workspace"
+      },
+      {
+        "sql_task": {
+          "file": {
+            "path": "/Workspace/Users/[USERNAME]/.bundle/task-source/default/files/sql.sql",
+            "source": "WORKSPACE"
+          },
+          "warehouse_id": "cafef00d"
+        },
+        "task_key": "test_task_sql"
       }
     ],
     "webhook_notifications": {}
acceptance/bundle/resources/jobs/task-source/sql.sql

Lines changed: 1 addition & 0 deletions

@@ -0,0 +1 @@
+SELECT 1;

bundle/deploy/terraform/tfdyn/convert_job.go

Lines changed: 3 additions & 1 deletion
@@ -17,9 +17,11 @@ import (
 // These are the task types that support the source field
 // https://docs.databricks.com/api/workspace/jobs/create
 var supportedTypeTasks = []string{
-	"db_task",
+	"dbt_task",
+	"gen_ai_compute_task",
 	"notebook_task",
 	"spark_python_task",
+	"sql_task.file",
 }
 
 func setSourceIfNotSet(task dyn.Value, defaultSource jobs.Source) (dyn.Value, error) {
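
The dotted "sql_task.file" entry is the interesting part: a plain entry names a task type whose top-level source should be defaulted, while a dotted entry points at a nested block that carries its own source. Below is a minimal, self-contained sketch of that lookup logic, assuming a plain map[string]any representation of a task; the actual setSourceIfNotSet operates on dyn.Value, and setDefaultSource here is a hypothetical stand-in, not the CLI's implementation:

```go
package main

import (
	"fmt"
	"strings"
)

// Mirrors the list in convert_job.go: a plain entry names a task type whose
// top-level "source" should be defaulted; a dotted entry such as
// "sql_task.file" names a nested block that carries its own "source".
var supportedTypeTasks = []string{
	"dbt_task",
	"gen_ai_compute_task",
	"notebook_task",
	"spark_python_task",
	"sql_task.file",
}

// setDefaultSource is a hypothetical stand-in for setSourceIfNotSet: it walks
// each supported path inside the task and fills in "source" where it is unset.
func setDefaultSource(task map[string]any, defaultSource string) {
	for _, entry := range supportedTypeTasks {
		node := task
		ok := true
		// Descend one map level per path segment ("sql_task", then "file").
		for _, key := range strings.Split(entry, ".") {
			child, found := node[key].(map[string]any)
			if !found {
				ok = false
				break
			}
			node = child
		}
		if ok {
			if _, set := node["source"]; !set {
				node["source"] = defaultSource
			}
		}
	}
}

func main() {
	task := map[string]any{
		"task_key": "test_task_sql",
		"sql_task": map[string]any{
			"warehouse_id": "cafef00d",
			"file":         map[string]any{"path": "./sql.sql"},
		},
	}
	setDefaultSource(task, "WORKSPACE")
	fmt.Println(task["sql_task"].(map[string]any)["file"]) // map[path:./sql.sql source:WORKSPACE]
}
```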

bundle/deploy/terraform/tfdyn/convert_job_test.go

Lines changed: 92 additions & 0 deletions
@@ -2,12 +2,16 @@ package tfdyn
 
 import (
 	"context"
+	"reflect"
+	"slices"
+	"strings"
 	"testing"
 
 	"github.com/databricks/cli/bundle/config/resources"
 	"github.com/databricks/cli/bundle/internal/tf/schema"
 	"github.com/databricks/cli/libs/dyn"
 	"github.com/databricks/cli/libs/dyn/convert"
+	"github.com/databricks/cli/libs/textutil"
 	"github.com/databricks/databricks-sdk-go/service/compute"
 	"github.com/databricks/databricks-sdk-go/service/jobs"
 	"github.com/stretchr/testify/assert"
@@ -286,3 +290,91 @@ func TestConvertJobApplyPolicyDefaultValues(t *testing.T) {
 		},
 	}, out.Job["my_job"])
 }
+
+// TestSupportedTypeTasksComplete verifies that supportedTypeTasks includes all task types with a Source field.
+func TestSupportedTypeTasksComplete(t *testing.T) {
+	// Use reflection to find all task types that have a Source field
+	taskType := reflect.TypeOf(jobs.Task{})
+	var tasksWithSource []string
+
+	for i := range taskType.NumField() {
+		field := taskType.Field(i)
+
+		// Skip non-task fields (like DependsOn, Libraries, etc.)
+		if !strings.HasSuffix(field.Name, "Task") {
+			continue
+		}
+
+		// Get the type of the task field (e.g., *NotebookTask)
+		taskFieldType := field.Type
+		if taskFieldType.Kind() == reflect.Ptr {
+			taskFieldType = taskFieldType.Elem()
+		}
+
+		if taskFieldType.Kind() != reflect.Struct {
+			continue
+		}
+
+		// Recursively search for Source fields in this task type.
+		// We only search one level deep to catch nested Source fields like sql_task.file.
+		taskName := textutil.CamelToSnakeCase(field.Name)
+		sourcePaths := findSourceFieldsShallow(taskFieldType)
+		for _, path := range sourcePaths {
+			if path == "" {
+				tasksWithSource = append(tasksWithSource, taskName)
+			} else {
+				tasksWithSource = append(tasksWithSource, taskName+"."+path)
+			}
+		}
+	}
+
+	// Verify that all tasks with Source fields are in supportedTypeTasks
+	slices.Sort(tasksWithSource)
+	sortedSupported := make([]string, len(supportedTypeTasks))
+	copy(sortedSupported, supportedTypeTasks)
+	slices.Sort(sortedSupported)
+
+	assert.Equal(t, sortedSupported, tasksWithSource,
+		"supportedTypeTasks must include all task types with a Source field. "+
+			"If this test fails, update supportedTypeTasks in convert_job.go")
+}
+
+// findSourceFieldsShallow searches for Source fields in a struct type, going only one level deep.
+// Returns a list of paths to Source fields (e.g., "" for direct Source, "file" for sql_task.file).
+func findSourceFieldsShallow(t reflect.Type) []string {
+	if t.Kind() == reflect.Ptr {
+		t = t.Elem()
+	}
+
+	if t.Kind() != reflect.Struct {
+		return nil
+	}
+
+	var paths []string
+
+	for i := range t.NumField() {
+		field := t.Field(i)
+
+		// Check if this field is named "Source"
+		if field.Name == "Source" {
+			paths = append(paths, "")
+			continue
+		}
+
+		// Only search one level deep in nested structs
+		fieldType := field.Type
+		if fieldType.Kind() == reflect.Ptr {
+			fieldType = fieldType.Elem()
+		}
+
+		if fieldType.Kind() == reflect.Struct {
+			// Check if the nested struct has a Source field
+			if _, hasSource := fieldType.FieldByName("Source"); hasSource {
+				fieldName := textutil.CamelToSnakeCase(field.Name)
+				paths = append(paths, fieldName)
+			}
+		}
+	}
+
+	return paths
+}
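
This reflection-based test acts as a completeness guard: if a future Jobs SDK release adds a task type (or a nested block such as sql_task.file) with a Source field, the test fails until supportedTypeTasks in convert_job.go is updated to match. It can be run in isolation with `go test ./bundle/deploy/terraform/tfdyn -run TestSupportedTypeTasksComplete`.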
