Skip to content

Commit 592e111

Browse files
Update filenames used by bundle generate to use .<resource-type>.yml (#1901)
## Changes Update filenames used by bundle generate to use '.resource-type.yml' Similar to [Add sub-extension to resource files in built-in templates by shreyas-goenka · Pull Request #1777 · databricks/cli](#1777) --------- Co-authored-by: shreyas-goenka <[email protected]>
1 parent fab3e8f commit 592e111

File tree

4 files changed

+147
-5
lines changed

4 files changed

+147
-5
lines changed

cmd/bundle/generate/generate_test.go

Lines changed: 120 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,10 @@ package generate
33
import (
44
"bytes"
55
"context"
6+
"errors"
67
"fmt"
78
"io"
9+
"io/fs"
810
"os"
911
"path/filepath"
1012
"testing"
@@ -90,7 +92,7 @@ func TestGeneratePipelineCommand(t *testing.T) {
9092
err := cmd.RunE(cmd, []string{})
9193
require.NoError(t, err)
9294

93-
data, err := os.ReadFile(filepath.Join(configDir, "test_pipeline.yml"))
95+
data, err := os.ReadFile(filepath.Join(configDir, "test_pipeline.pipeline.yml"))
9496
require.NoError(t, err)
9597
require.Equal(t, fmt.Sprintf(`resources:
9698
pipelines:
@@ -186,7 +188,123 @@ func TestGenerateJobCommand(t *testing.T) {
186188
err := cmd.RunE(cmd, []string{})
187189
require.NoError(t, err)
188190

189-
data, err := os.ReadFile(filepath.Join(configDir, "test_job.yml"))
191+
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
192+
require.NoError(t, err)
193+
194+
require.Equal(t, fmt.Sprintf(`resources:
195+
jobs:
196+
test_job:
197+
name: test-job
198+
job_clusters:
199+
- new_cluster:
200+
custom_tags:
201+
"Tag1": "24X7-1234"
202+
- new_cluster:
203+
spark_conf:
204+
"spark.databricks.delta.preview.enabled": "true"
205+
tasks:
206+
- task_key: notebook_task
207+
notebook_task:
208+
notebook_path: %s
209+
parameters:
210+
- name: empty
211+
default: ""
212+
`, filepath.Join("..", "src", "notebook.py")), string(data))
213+
214+
data, err = os.ReadFile(filepath.Join(srcDir, "notebook.py"))
215+
require.NoError(t, err)
216+
require.Equal(t, "# Databricks notebook source\nNotebook content", string(data))
217+
}
218+
219+
func touchEmptyFile(t *testing.T, path string) {
220+
err := os.MkdirAll(filepath.Dir(path), 0700)
221+
require.NoError(t, err)
222+
f, err := os.Create(path)
223+
require.NoError(t, err)
224+
f.Close()
225+
}
226+
227+
func TestGenerateJobCommandOldFileRename(t *testing.T) {
228+
cmd := NewGenerateJobCommand()
229+
230+
root := t.TempDir()
231+
b := &bundle.Bundle{
232+
BundleRootPath: root,
233+
}
234+
235+
m := mocks.NewMockWorkspaceClient(t)
236+
b.SetWorkpaceClient(m.WorkspaceClient)
237+
238+
jobsApi := m.GetMockJobsAPI()
239+
jobsApi.EXPECT().Get(mock.Anything, jobs.GetJobRequest{JobId: 1234}).Return(&jobs.Job{
240+
Settings: &jobs.JobSettings{
241+
Name: "test-job",
242+
JobClusters: []jobs.JobCluster{
243+
{NewCluster: compute.ClusterSpec{
244+
CustomTags: map[string]string{
245+
"Tag1": "24X7-1234",
246+
},
247+
}},
248+
{NewCluster: compute.ClusterSpec{
249+
SparkConf: map[string]string{
250+
"spark.databricks.delta.preview.enabled": "true",
251+
},
252+
}},
253+
},
254+
Tasks: []jobs.Task{
255+
{
256+
TaskKey: "notebook_task",
257+
NotebookTask: &jobs.NotebookTask{
258+
NotebookPath: "/test/notebook",
259+
},
260+
},
261+
},
262+
Parameters: []jobs.JobParameterDefinition{
263+
{
264+
Name: "empty",
265+
Default: "",
266+
},
267+
},
268+
},
269+
}, nil)
270+
271+
workspaceApi := m.GetMockWorkspaceAPI()
272+
workspaceApi.EXPECT().GetStatusByPath(mock.Anything, "/test/notebook").Return(&workspace.ObjectInfo{
273+
ObjectType: workspace.ObjectTypeNotebook,
274+
Language: workspace.LanguagePython,
275+
Path: "/test/notebook",
276+
}, nil)
277+
278+
notebookContent := io.NopCloser(bytes.NewBufferString("# Databricks notebook source\nNotebook content"))
279+
workspaceApi.EXPECT().Download(mock.Anything, "/test/notebook", mock.Anything).Return(notebookContent, nil)
280+
281+
cmd.SetContext(bundle.Context(context.Background(), b))
282+
cmd.Flag("existing-job-id").Value.Set("1234")
283+
284+
configDir := filepath.Join(root, "resources")
285+
cmd.Flag("config-dir").Value.Set(configDir)
286+
287+
srcDir := filepath.Join(root, "src")
288+
cmd.Flag("source-dir").Value.Set(srcDir)
289+
290+
var key string
291+
cmd.Flags().StringVar(&key, "key", "test_job", "")
292+
293+
// Create an old generated file first
294+
oldFilename := filepath.Join(configDir, "test_job.yml")
295+
touchEmptyFile(t, oldFilename)
296+
297+
// Having existing files requires the --force flag to regenerate them
298+
cmd.Flag("force").Value.Set("true")
299+
300+
err := cmd.RunE(cmd, []string{})
301+
require.NoError(t, err)
302+
303+
// Make sure the old file does not exist after the run
304+
_, err = os.Stat(oldFilename)
305+
require.True(t, errors.Is(err, fs.ErrNotExist))
306+
307+
data, err := os.ReadFile(filepath.Join(configDir, "test_job.job.yml"))
190308
require.NoError(t, err)
191309

192310
require.Equal(t, fmt.Sprintf(`resources:

cmd/bundle/generate/job.go

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
package generate
22

33
import (
4+
"errors"
45
"fmt"
6+
"io/fs"
57
"os"
68
"path/filepath"
79

@@ -83,7 +85,17 @@ func NewGenerateJobCommand() *cobra.Command {
8385
return err
8486
}
8587

86-
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
88+
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", jobKey))
89+
filename := filepath.Join(configDir, fmt.Sprintf("%s.job.yml", jobKey))
90+
91+
// Users might repeatedly run the generate command to update their bundle jobs with any changes made in the Databricks UI.
92+
// Due to a change in the generated file names, we need to first rename the existing resource file to the new name.
93+
// Otherwise users can end up with duplicated resources.
94+
err = os.Rename(oldFilename, filename)
95+
if err != nil && !errors.Is(err, fs.ErrNotExist) {
96+
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
97+
}
98+
8799
saver := yamlsaver.NewSaverWithStyle(map[string]yaml.Style{
88100
// Including all JobSettings and nested fields which are map[string]string type
89101
"spark_conf": yaml.DoubleQuotedStyle,

cmd/bundle/generate/pipeline.go

Lines changed: 13 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
package generate
22

33
import (
4+
"errors"
45
"fmt"
6+
"io/fs"
57
"os"
68
"path/filepath"
79

@@ -83,7 +85,17 @@ func NewGeneratePipelineCommand() *cobra.Command {
8385
return err
8486
}
8587

86-
filename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
88+
oldFilename := filepath.Join(configDir, fmt.Sprintf("%s.yml", pipelineKey))
89+
filename := filepath.Join(configDir, fmt.Sprintf("%s.pipeline.yml", pipelineKey))
90+
91+
// Users might repeatedly run the generate command to update their bundle pipelines with any changes made in the Databricks UI.
92+
// Due to a change in the generated file names, we need to first rename the existing resource file to the new name.
93+
// Otherwise users can end up with duplicated resources.
94+
err = os.Rename(oldFilename, filename)
95+
if err != nil && !errors.Is(err, fs.ErrNotExist) {
96+
return fmt.Errorf("failed to rename file %s. DABs uses the resource type as a sub-extension for generated content, please rename it to %s, err: %w", oldFilename, filename, err)
97+
}
98+
8799
saver := yamlsaver.NewSaverWithStyle(
88100
// Including all PipelineSpec and nested fields which are map[string]string type
89101
map[string]yaml.Style{

internal/bundle/bind_resource_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ func TestAccGenerateAndBind(t *testing.T) {
166166
_, err = os.Stat(filepath.Join(bundleRoot, "src", "test.py"))
167167
require.NoError(t, err)
168168

169-
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.yml"))
169+
matches, err := filepath.Glob(filepath.Join(bundleRoot, "resources", "test_job_key.job.yml"))
170170
require.NoError(t, err)
171171

172172
require.Len(t, matches, 1)

0 commit comments

Comments
 (0)