Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
8a0c3ea
refactor: Simplify initTestTemplate to fail tests directly
denik Dec 13, 2024
4e1de16
fix: Resolve linting errors in bundle integration tests
denik Dec 13, 2024
de5db28
refactor: Fix error handling and cleanup in bind resource test
denik Dec 13, 2024
890966a
fix: Resolve linting errors in bundle test files
denik Dec 13, 2024
3b1bd74
fix: Remove unnecessary err variable from initTestTemplate calls
denik Dec 13, 2024
9065245
fix: Resolve linting errors in bundle integration tests
denik Dec 13, 2024
80f0f8e
fix: Resolve linting errors in bundle integration tests
denik Dec 13, 2024
f922b96
refactor: Fix error variable declarations in test files
denik Dec 13, 2024
bc5ede9
refactor: Remove redundant error checks and simplify error assignments
denik Dec 13, 2024
d9bec7d
clean up after AI
denik Dec 13, 2024
76fffea
refactor: Update deployBundle calls to remove error handling
denik Dec 13, 2024
6cff089
feat: Remove error handling for deployBundle in integration tests
denik Dec 13, 2024
b728b2b
refactor: Remove unnecessary error handling in test functions
denik Dec 16, 2024
46fb40a
refactor: Fix linting errors in bundle integration tests
denik Dec 16, 2024
ed0127f
refactor: Fix linting issues in bundle integration tests
denik Dec 16, 2024
de20541
refactor: Fix linting issues in bundle integration tests
denik Dec 16, 2024
d8bf995
fix linting issues
denik Dec 16, 2024
7b8dd11
Based on the changes, here's a concise commit message:
denik Dec 16, 2024
05c5766
refactor: Remove unnecessary error handling in bundle test cleanup
denik Dec 16, 2024
e42d5fa
fix: Remove unnecessary error assignment in destroyBundle call
denik Dec 16, 2024
fa9b050
clean up unnecessary require.NoError; use require.NoError instead of …
denik Dec 16, 2024
577f6a5
fix dashboards_test.go - in one case we actually required to have an …
denik Dec 16, 2024
7ea1009
fix syntax error
denik Dec 16, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 2 additions & 4 deletions integration/bundle/artifacts_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -247,12 +247,11 @@ func TestUploadArtifactFileToVolumeThatDoesNotExist(t *testing.T) {
require.NoError(t, err)
})

bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
"unique_id": uuid.New().String(),
"schema_name": schemaName,
"volume_name": "doesnotexist",
})
require.NoError(t, err)

ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
stdout, stderr, err := testcli.RequireErrorRun(t, ctx, "bundle", "deploy")
Expand Down Expand Up @@ -284,12 +283,11 @@ func TestUploadArtifactToVolumeNotYetDeployed(t *testing.T) {
require.NoError(t, err)
})

bundleRoot, err := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "artifact_path_with_volume", map[string]any{
"unique_id": uuid.New().String(),
"schema_name": schemaName,
"volume_name": "my_volume",
})
require.NoError(t, err)

ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
stdout, stderr, err := testcli.RequireErrorRun(t, ctx, "bundle", "deploy")
Expand Down
15 changes: 5 additions & 10 deletions integration/bundle/basic_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -16,27 +16,22 @@ func TestBasicBundleDeployWithFailOnActiveRuns(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "basic", map[string]any{
root := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)

t.Cleanup(func() {
err = destroyBundle(t, ctx, root)
require.NoError(t, err)
destroyBundle(t, ctx, root)
})

// deploy empty bundle
err = deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
require.NoError(t, err)
deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})

// Remove .databricks directory to simulate a fresh deployment
err = os.RemoveAll(filepath.Join(root, ".databricks"))
require.NoError(t, err)
require.NoError(t, os.RemoveAll(filepath.Join(root, ".databricks")))

// deploy empty bundle again
err = deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
require.NoError(t, err)
deployBundleWithFlags(t, ctx, root, []string{"--fail-on-active-runs"})
}
32 changes: 11 additions & 21 deletions integration/bundle/bind_resource_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -23,30 +23,27 @@ func TestBindJobToExistingJob(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
"spark_version": "13.3.x-scala2.12",
"node_type_id": nodeTypeId,
})
require.NoError(t, err)

jobId := gt.createTestJob(ctx)
t.Cleanup(func() {
gt.destroyJob(ctx, jobId)
require.NoError(t, err)
})

ctx = env.Set(ctx, "BUNDLE_ROOT", bundleRoot)
c := testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId), "--auto-approve")
_, _, err = c.Run()
_, _, err := c.Run()
require.NoError(t, err)

// Remove .databricks directory to simulate a fresh deployment
err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks"))
require.NoError(t, err)

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
Expand All @@ -67,8 +64,7 @@ func TestBindJobToExistingJob(t *testing.T) {
err = os.RemoveAll(filepath.Join(bundleRoot, ".databricks"))
require.NoError(t, err)

err = destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)

// Check that job is unbound and exists after bundle is destroyed
job, err = w.Jobs.Get(ctx, jobs.GetJobRequest{
Expand All @@ -85,18 +81,16 @@ func TestAbortBind(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "basic", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
"spark_version": "13.3.x-scala2.12",
"node_type_id": nodeTypeId,
})
require.NoError(t, err)

jobId := gt.createTestJob(ctx)
t.Cleanup(func() {
gt.destroyJob(ctx, jobId)
err := destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)
})

// Bind should fail because prompting is not possible.
Expand All @@ -105,12 +99,11 @@ func TestAbortBind(t *testing.T) {
c := testcli.NewRunner(t, ctx, "bundle", "deployment", "bind", "foo", fmt.Sprint(jobId))

// Expect error suggesting to use --auto-approve
_, _, err = c.Run()
_, _, err := c.Run()
assert.ErrorContains(t, err, "failed to bind the resource")
assert.ErrorContains(t, err, "This bind operation requires user confirmation, but the current console does not support prompting. Please specify --auto-approve if you would like to skip prompts and proceed")

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
Expand All @@ -130,10 +123,9 @@ func TestGenerateAndBind(t *testing.T) {
gt := &generateJobTest{T: wt, w: wt.W}

uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "with_includes", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "with_includes", map[string]any{
"unique_id": uniqueId,
})
require.NoError(t, err)

w, err := databricks.NewWorkspaceClient()
require.NoError(t, err)
Expand Down Expand Up @@ -169,11 +161,9 @@ func TestGenerateAndBind(t *testing.T) {
_, _, err = c.Run()
require.NoError(t, err)

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

err = destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)

// Check that job is bound and does not exist after bundle is destroyed
_, err = w.Jobs.Get(ctx, jobs.GetJobRequest{
Expand Down
9 changes: 3 additions & 6 deletions integration/bundle/clusters_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -20,16 +20,14 @@ func TestDeployBundleWithCluster(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "clusters", map[string]any{
root := initTestTemplate(t, ctx, "clusters", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)

t.Cleanup(func() {
err = destroyBundle(t, ctx, root)
require.NoError(t, err)
destroyBundle(t, ctx, root)

cluster, err := wt.W.Clusters.GetByClusterName(ctx, fmt.Sprintf("test-cluster-%s", uniqueId))
if err != nil {
Expand All @@ -39,8 +37,7 @@ func TestDeployBundleWithCluster(t *testing.T) {
}
})

err = deployBundle(t, ctx, root)
require.NoError(t, err)
deployBundle(t, ctx, root)

// Cluster should exist after bundle deployment
cluster, err := wt.W.Clusters.GetByClusterName(ctx, fmt.Sprintf("test-cluster-%s", uniqueId))
Expand Down
14 changes: 5 additions & 9 deletions integration/bundle/dashboards_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,19 +18,16 @@ func TestDashboards(t *testing.T) {

warehouseID := testutil.GetEnvOrSkipTest(t, "TEST_DEFAULT_WAREHOUSE_ID")
uniqueID := uuid.New().String()
root, err := initTestTemplate(t, ctx, "dashboards", map[string]any{
root := initTestTemplate(t, ctx, "dashboards", map[string]any{
"unique_id": uniqueID,
"warehouse_id": warehouseID,
})
require.NoError(t, err)

t.Cleanup(func() {
err = destroyBundle(t, ctx, root)
require.NoError(t, err)
destroyBundle(t, ctx, root)
})

err = deployBundle(t, ctx, root)
require.NoError(t, err)
deployBundle(t, ctx, root)

// Load bundle configuration by running the validate command.
b := unmarshalConfig(t, mustValidateBundle(t, ctx, root))
Expand All @@ -55,12 +52,11 @@ func TestDashboards(t *testing.T) {
require.NoError(t, err)

// Try to redeploy the bundle and confirm that the out of band modification is detected.
stdout, _, err := deployBundleWithArgs(t, ctx, root)
stdout, _, err := deployBundleWithArgsErr(t, ctx, root)
require.Error(t, err)
assert.Contains(t, stdout, `Error: dashboard "file_reference" has been modified remotely`+"\n")

// Redeploy the bundle with the --force flag and confirm that the out of band modification is ignored.
_, stderr, err := deployBundleWithArgs(t, ctx, root, "--force")
require.NoError(t, err)
_, stderr := deployBundleWithArgs(t, ctx, root, "--force")
assert.Contains(t, stderr, `Deployment complete!`+"\n")
}
42 changes: 14 additions & 28 deletions integration/bundle/deploy_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -25,17 +25,14 @@ import (
)

func setupUcSchemaBundle(t *testing.T, ctx context.Context, w *databricks.WorkspaceClient, uniqueId string) string {
bundleRoot, err := initTestTemplate(t, ctx, "uc_schema", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "uc_schema", map[string]any{
"unique_id": uniqueId,
})
require.NoError(t, err)

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

t.Cleanup(func() {
err := destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)
})

// Assert the schema is created
Expand Down Expand Up @@ -97,8 +94,7 @@ func TestBundleDeployUcSchema(t *testing.T) {
require.NoError(t, err)

// Redeploy the bundle
err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

// Assert the schema is deleted
_, err = w.Schemas.GetByFullName(ctx, strings.Join([]string{catalogName, schemaName}, "."))
Expand Down Expand Up @@ -135,16 +131,14 @@ func TestBundlePipelineDeleteWithoutAutoApprove(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)

// deploy pipeline
err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

// assert pipeline is created
pipelineName := "test-bundle-pipeline-" + uniqueId
Expand Down Expand Up @@ -182,17 +176,14 @@ func TestBundlePipelineRecreateWithoutAutoApprove(t *testing.T) {
w := wt.W
uniqueId := uuid.New().String()

bundleRoot, err := initTestTemplate(t, ctx, "recreate_pipeline", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "recreate_pipeline", map[string]any{
"unique_id": uniqueId,
})
require.NoError(t, err)

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

t.Cleanup(func() {
err := destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)
})

// Assert the pipeline is created
Expand Down Expand Up @@ -221,16 +212,14 @@ func TestDeployBasicBundleLogs(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
root, err := initTestTemplate(t, ctx, "basic", map[string]any{
root := initTestTemplate(t, ctx, "basic", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)

t.Cleanup(func() {
err = destroyBundle(t, ctx, root)
require.NoError(t, err)
destroyBundle(t, ctx, root)
})

currentUser, err := wt.W.CurrentUser.Me(ctx)
Expand All @@ -251,17 +240,14 @@ func TestDeployUcVolume(t *testing.T) {
w := wt.W

uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "volume", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "volume", map[string]any{
"unique_id": uniqueId,
})
require.NoError(t, err)

err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

t.Cleanup(func() {
err := destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)
})

// Assert the volume is created successfully
Expand Down
12 changes: 4 additions & 8 deletions integration/bundle/deploy_then_remove_resources_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,14 @@ func TestBundleDeployThenRemoveResources(t *testing.T) {

nodeTypeId := testutil.GetCloud(t).NodeTypeID()
uniqueId := uuid.New().String()
bundleRoot, err := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
bundleRoot := initTestTemplate(t, ctx, "deploy_then_remove_resources", map[string]any{
"unique_id": uniqueId,
"node_type_id": nodeTypeId,
"spark_version": defaultSparkVersion,
})
require.NoError(t, err)

// deploy pipeline
err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

// assert pipeline is created
pipelineName := "test-bundle-pipeline-" + uniqueId
Expand All @@ -46,8 +44,7 @@ func TestBundleDeployThenRemoveResources(t *testing.T) {
require.NoError(t, err)

// deploy again
err = deployBundle(t, ctx, bundleRoot)
require.NoError(t, err)
deployBundle(t, ctx, bundleRoot)

// assert pipeline is deleted
_, err = w.Pipelines.GetByName(ctx, pipelineName)
Expand All @@ -58,7 +55,6 @@ func TestBundleDeployThenRemoveResources(t *testing.T) {
assert.ErrorContains(t, err, "does not exist")

t.Cleanup(func() {
err = destroyBundle(t, ctx, bundleRoot)
require.NoError(t, err)
destroyBundle(t, ctx, bundleRoot)
})
}
Loading
Loading