Skip to content

Commit 0148dbb

Browse files
committed
* Relaxed force_new constraint on catalog attribute in databricks_pipeline resource to allow changing the default catalog for existing pipelines
Pipelines will still be recreated when switching between `storage` and `catalog` modes, but changing the catalog value in an existing catalog-based pipeline no longer requires recreation. Resolves #4692
1 parent 7376ed9 commit 0148dbb

File tree

4 files changed

+212
-3
lines changed

4 files changed

+212
-3
lines changed

NEXT_CHANGELOG.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,8 @@
66

77
### New Features and Improvements
88

9-
* Improve `databricks_service_principals` data source ([#5164](https://github.com/databricks/terraform-provider-databricks/pull/5164))
9+
* Improve `databricks_service_principals` data source ([#5164](https://github.com/databricks/terraform-provider-databricks/pull/5164)).
10+
* Relaxed `force_new` constraint on `catalog` attribute in `databricks_pipeline` resource to allow changing the default catalog for existing pipelines ([#5180](https://github.com/databricks/terraform-provider-databricks/issues/5180)).
1011

1112
### Bug Fixes
1213

docs/resources/pipeline.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -81,7 +81,7 @@ resource "databricks_pipeline" "this" {
8181
The following arguments are supported:
8282

8383
* `name` - A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
84-
* `catalog` - The name of catalog in Unity Catalog. *Change of this parameter forces recreation of the pipeline.* (Conflicts with `storage`).
84+
* `catalog` - The name of the default catalog in Unity Catalog. *Changing this parameter forces recreation of the pipeline if you switch from `storage` to `catalog` or vice versa. If the pipeline was already created with `catalog` set, the value can be changed without recreation.* (Conflicts with `storage`).
8585
* `schema` - (Optional, String, Conflicts with `target`) The default schema (database) where tables are read from or published to. The presence of this attribute implies that the pipeline is in direct publishing mode.
8686
* `storage` - A location on cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. *Change of this parameter forces recreation of the pipeline.* (Conflicts with `catalog`).
8787
* `target` - (Optional, String, Conflicts with `schema`) The name of a database (in either the Hive metastore or in a UC catalog) for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.

pipelines/resource_pipeline.go

Lines changed: 38 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -198,7 +198,8 @@ func (Pipeline) CustomizeSchema(s *common.CustomizableSchema) *common.Customizab
198198

199199
// ForceNew fields
200200
s.SchemaPath("storage").SetForceNew()
201-
s.SchemaPath("catalog").SetForceNew()
201+
// catalog can be updated in-place, but switching between storage and catalog requires recreation
202+
// (handled in CustomizeDiff)
202203
s.SchemaPath("gateway_definition", "connection_id").SetForceNew()
203204
s.SchemaPath("gateway_definition", "gateway_storage_catalog").SetForceNew()
204205
s.SchemaPath("gateway_definition", "gateway_storage_schema").SetForceNew()
@@ -335,5 +336,41 @@ func ResourcePipeline() common.Resource {
335336
Timeouts: &schema.ResourceTimeout{
336337
Default: schema.DefaultTimeout(DefaultTimeout),
337338
},
339+
CustomizeDiff: func(ctx context.Context, d *schema.ResourceDiff) error {
	// Allow changing the catalog value in-place on an existing pipeline, but
	// force recreation when switching between storage and catalog modes.
	// Only relevant on update: a resource without an ID is being created.
	if d.Id() == "" {
		return nil
	}
	// A mode switch manifests as both attributes changing in the same plan
	// (one side goes from set -> empty, the other from empty -> set).
	if !d.HasChange("storage") || !d.HasChange("catalog") {
		return nil
	}
	oldStorage, newStorage := d.GetChange("storage")
	oldCatalog, newCatalog := d.GetChange("catalog")
	// storage -> catalog: old storage was set and new catalog is set.
	switchingToCatalog := oldStorage != nil && oldStorage != "" && newCatalog != nil && newCatalog != ""
	// catalog -> storage: old catalog was set and new storage is set.
	switchingToStorage := oldCatalog != nil && oldCatalog != "" && newStorage != nil && newStorage != ""
	if switchingToCatalog || switchingToStorage {
		// Mark both attributes so the plan shows recreation regardless of
		// which side of the switch the user is looking at.
		for _, attr := range []string{"catalog", "storage"} {
			if err := d.ForceNew(attr); err != nil {
				return err
			}
		}
	}
	return nil
},
338375
}
339376
}

pipelines/resource_pipeline_test.go

Lines changed: 171 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -806,3 +806,174 @@ func TestDefault(t *testing.T) {
806806
assert.NoError(t, err)
807807
assert.Equal(t, "abcd", d.Id())
808808
}
809+
810+
func TestUpdatePipelineCatalogInPlace(t *testing.T) {
811+
state := pipelines.PipelineStateRunning
812+
spec := pipelines.PipelineSpec{
813+
Id: "abcd",
814+
Name: "test",
815+
Catalog: "new_catalog",
816+
Libraries: []pipelines.PipelineLibrary{
817+
{
818+
Notebook: &pipelines.NotebookLibrary{
819+
Path: "/Test",
820+
},
821+
},
822+
},
823+
Filters: &pipelines.Filters{
824+
Include: []string{"com.databricks.include"},
825+
},
826+
Channel: "CURRENT",
827+
Edition: "ADVANCED",
828+
}
829+
qa.ResourceFixture{
830+
MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
831+
e := w.GetMockPipelinesAPI().EXPECT()
832+
e.Update(mock.Anything, pipelines.EditPipeline{
833+
Id: "abcd",
834+
PipelineId: "abcd",
835+
Name: "test",
836+
Catalog: "new_catalog",
837+
Libraries: []pipelines.PipelineLibrary{
838+
{
839+
Notebook: &pipelines.NotebookLibrary{
840+
Path: "/Test",
841+
},
842+
},
843+
},
844+
Filters: &pipelines.Filters{
845+
Include: []string{"com.databricks.include"},
846+
},
847+
Channel: "CURRENT",
848+
Edition: "ADVANCED",
849+
}).Return(nil)
850+
e.Get(mock.Anything, pipelines.GetPipelineRequest{
851+
PipelineId: "abcd",
852+
}).Return(&pipelines.GetPipelineResponse{
853+
PipelineId: "abcd",
854+
Spec: &spec,
855+
State: state,
856+
}, nil).Twice()
857+
},
858+
Resource: ResourcePipeline(),
859+
HCL: `name = "test"
860+
catalog = "new_catalog"
861+
library {
862+
notebook {
863+
path = "/Test"
864+
}
865+
}
866+
filters {
867+
include = [ "com.databricks.include" ]
868+
}`,
869+
InstanceState: map[string]string{
870+
"name": "test",
871+
"catalog": "old_catalog",
872+
},
873+
Update: true,
874+
ID: "abcd",
875+
}.ApplyAndExpectData(t, map[string]any{
876+
"id": "abcd",
877+
"catalog": "new_catalog",
878+
})
879+
}
880+
881+
func TestUpdatePipelineStorageToCatalogForceNew(t *testing.T) {
882+
state := pipelines.PipelineStateRunning
883+
spec := pipelines.PipelineSpec{
884+
Id: "abcd",
885+
Name: "test",
886+
Storage: "/test/storage",
887+
Libraries: []pipelines.PipelineLibrary{
888+
{
889+
Notebook: &pipelines.NotebookLibrary{
890+
Path: "/Test",
891+
},
892+
},
893+
},
894+
Filters: &pipelines.Filters{
895+
Include: []string{"com.databricks.include"},
896+
},
897+
Channel: "CURRENT",
898+
Edition: "ADVANCED",
899+
}
900+
qa.ResourceFixture{
901+
MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
902+
e := w.GetMockPipelinesAPI().EXPECT()
903+
e.Update(mock.Anything, mock.Anything).Return(nil)
904+
e.Get(mock.Anything, pipelines.GetPipelineRequest{
905+
PipelineId: "abcd",
906+
}).Return(&pipelines.GetPipelineResponse{
907+
PipelineId: "abcd",
908+
Spec: &spec,
909+
State: state,
910+
}, nil)
911+
},
912+
RequiresNew: true,
913+
Resource: ResourcePipeline(),
914+
Update: true,
915+
ID: "abcd",
916+
InstanceState: map[string]string{
917+
"name": "test",
918+
"storage": "/test/storage",
919+
},
920+
HCL: `
921+
name = "test"
922+
catalog = "new_catalog"
923+
library {
924+
notebook {
925+
path = "/Test"
926+
}
927+
}`,
928+
}.ApplyNoError(t)
929+
}
930+
931+
func TestUpdatePipelineCatalogToStorageForceNew(t *testing.T) {
932+
state := pipelines.PipelineStateRunning
933+
spec := pipelines.PipelineSpec{
934+
Id: "abcd",
935+
Name: "test",
936+
Catalog: "old_catalog",
937+
Libraries: []pipelines.PipelineLibrary{
938+
{
939+
Notebook: &pipelines.NotebookLibrary{
940+
Path: "/Test",
941+
},
942+
},
943+
},
944+
Filters: &pipelines.Filters{
945+
Include: []string{"com.databricks.include"},
946+
},
947+
Channel: "CURRENT",
948+
Edition: "ADVANCED",
949+
}
950+
qa.ResourceFixture{
951+
MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
952+
e := w.GetMockPipelinesAPI().EXPECT()
953+
e.Update(mock.Anything, mock.Anything).Return(nil)
954+
e.Get(mock.Anything, pipelines.GetPipelineRequest{
955+
PipelineId: "abcd",
956+
}).Return(&pipelines.GetPipelineResponse{
957+
PipelineId: "abcd",
958+
Spec: &spec,
959+
State: state,
960+
}, nil)
961+
},
962+
RequiresNew: true,
963+
Resource: ResourcePipeline(),
964+
Update: true,
965+
ID: "abcd",
966+
InstanceState: map[string]string{
967+
"name": "test",
968+
"catalog": "old_catalog",
969+
},
970+
HCL: `
971+
name = "test"
972+
storage = "/test/storage"
973+
library {
974+
notebook {
975+
path = "/Test"
976+
}
977+
}`,
978+
}.ApplyNoError(t)
979+
}

0 commit comments

Comments
 (0)