Commit 42e1a45

[Fix] Save Pipeline resource to state in addition to spec (#3869)
## Changes

After #3839, the provider's handling of DLT pipelines regressed: the `Read` method stopped populating certain top-level fields from the `GetPipelineResponse` into Terraform state. This PR fixes that by additionally writing all top-level fields of the response into state as part of the read operation. Resolves #3855.

## Tests

Unit tests cover the case specified in the issue.

- [x] `make test` run locally
- [ ] relevant change in `docs/` folder
- [ ] covered with integration tests in `internal/acceptance`
- [ ] relevant acceptance tests are passing
- [ ] using Go SDK
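At its core, the fix flattens a wrapper struct instead of the bare spec. Below is a minimal, self-contained sketch of that pattern; `PipelineSpec` and `GetPipelineResponse` here are simplified stand-ins for the SDK types, not the real definitions:

```go
package main

import "fmt"

// PipelineSpec is a simplified stand-in for the SDK's spec type: the
// user-configured portion of the pipeline.
type PipelineSpec struct {
	Name string
}

// GetPipelineResponse is a simplified stand-in for the SDK's response type:
// the spec plus server-populated top-level fields.
type GetPipelineResponse struct {
	Spec         *PipelineSpec
	State        string
	LastModified int64
}

// Pipeline mirrors the provider's wrapper: it embeds the spec and adds the
// server-populated top-level fields, so flattening one value writes both.
type Pipeline struct {
	PipelineSpec
	State        string
	LastModified int64
}

func main() {
	resp := GetPipelineResponse{
		Spec:         &PipelineSpec{Name: "test-pipeline"},
		State:        "RUNNING",
		LastModified: 123456,
	}
	// Before the fix, only resp.Spec was written to state, dropping State
	// and LastModified; the wrapper carries everything in one value.
	p := Pipeline{
		PipelineSpec: *resp.Spec,
		State:        resp.State,
		LastModified: resp.LastModified,
	}
	fmt.Printf("%+v\n", p)
	// {PipelineSpec:{Name:test-pipeline} State:RUNNING LastModified:123456}
}
```

Because the wrapper embeds the spec, a single `common.StructToData` call can write both the user-configured spec fields and the server-populated top-level fields.
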
1 parent 81be591 commit 42e1a45

File tree

2 files changed: 79 additions & 15 deletions

pipelines/resource_pipeline.go

Lines changed: 18 additions & 8 deletions
@@ -176,7 +176,8 @@ type Pipeline struct {
 
 func (Pipeline) Aliases() map[string]map[string]string {
 	return map[string]map[string]string{
-		"pipelines.Pipeline": aliasMap,
+		"pipelines.Pipeline":     aliasMap,
+		"pipelines.PipelineSpec": aliasMap,
 	}
 }
@@ -217,6 +218,7 @@ func (Pipeline) CustomizeSchema(s *common.CustomizableSchema) *common.CustomizableSchema {
 	s.SchemaPath("cause").SetComputed()
 	s.SchemaPath("cluster_id").SetComputed()
 	s.SchemaPath("creator_user_name").SetComputed()
+	s.SchemaPath("run_as_user_name").SetComputed()
 
 	// SuppressDiff fields
 	s.SchemaPath("edition").SetSuppressDiff()
@@ -274,12 +276,7 @@ func ResourcePipeline() common.Resource {
 			if err != nil {
 				return err
 			}
-			err = Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
-			if err != nil {
-				return err
-			}
-			d.Set("url", c.FormatURL("#joblist/pipelines/", d.Id()))
-			return nil
+			return Create(w, ctx, d, d.Timeout(schema.TimeoutCreate))
 		},
 		Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
 			w, err := c.WorkspaceClient()
@@ -294,7 +291,20 @@ func ResourcePipeline() common.Resource {
 			if readPipeline.Spec == nil {
 				return fmt.Errorf("pipeline spec is nil for '%v'", readPipeline.PipelineId)
 			}
-			return common.StructToData(readPipeline.Spec, pipelineSchema, d)
+			p := Pipeline{
+				PipelineSpec:    *readPipeline.Spec,
+				Cause:           readPipeline.Cause,
+				ClusterId:       readPipeline.ClusterId,
+				CreatorUserName: readPipeline.CreatorUserName,
+				Health:          readPipeline.Health,
+				LastModified:    readPipeline.LastModified,
+				LatestUpdates:   readPipeline.LatestUpdates,
+				RunAsUserName:   readPipeline.RunAsUserName,
+				State:           readPipeline.State,
+				// Provides the URL to the pipeline in the Databricks UI.
+				URL: c.FormatURL("#joblist/pipelines/", d.Id()),
+			}
+			return common.StructToData(p, pipelineSchema, d)
 		},
 		Update: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error {
 			w, err := c.WorkspaceClient()
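
One design note on the Create hunk above: the `url` attribute is no longer set during Create but is populated in Read instead. This is safe assuming, as this provider's `common.Resource` wrapper does, that state is refreshed via Read after a successful Create. A toy sketch of that wrapper pattern, with hypothetical simplified types rather than the provider's actual ones:

```go
package main

import "fmt"

// resourceData is a hypothetical stand-in for *schema.ResourceData.
type resourceData map[string]any

// resource is a hypothetical stand-in for the provider's resource wrapper.
type resource struct {
	Create func(d resourceData) error
	Read   func(d resourceData) error
}

// apply mimics a wrapper that refreshes state after creation, so fields
// populated only in Read (like the UI URL) still land in state on create.
func (r resource) apply(d resourceData) error {
	if err := r.Create(d); err != nil {
		return err
	}
	return r.Read(d)
}

func main() {
	r := resource{
		Create: func(d resourceData) error { d["id"] = "abcd"; return nil },
		Read: func(d resourceData) error {
			d["url"] = "https://workspace/#joblist/pipelines/" + d["id"].(string)
			return nil
		},
	}
	d := resourceData{}
	if err := r.apply(d); err != nil {
		panic(err)
	}
	fmt.Println(d["url"]) // https://workspace/#joblist/pipelines/abcd
}
```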

pipelines/resource_pipeline_test.go

Lines changed: 61 additions & 7 deletions
@@ -120,10 +120,11 @@ func TestResourcePipelineCreate(t *testing.T) {
 			e.Get(mock.Anything, pipelines.GetPipelineRequest{
 				PipelineId: "abcd",
 			}).Return(&pipelines.GetPipelineResponse{
-				PipelineId: "abcd",
-				Name:       "test-pipeline",
-				State:      pipelines.PipelineStateRunning,
-				Spec:       &basicPipelineSpec,
+				PipelineId:   "abcd",
+				Name:         "test-pipeline",
+				State:        pipelines.PipelineStateRunning,
+				LastModified: 123456,
+				Spec:         &basicPipelineSpec,
 			}, nil).Once()
 
 		},
@@ -158,7 +159,9 @@ func TestResourcePipelineCreate(t *testing.T) {
 		}
 		`,
 	}.ApplyAndExpectData(t, map[string]any{
-		"id": "abcd",
+		"id":            "abcd",
+		"last_modified": 123456,
+		"state":         "RUNNING",
 	})
 }
 
@@ -285,8 +288,59 @@ func TestResourcePipelineRead(t *testing.T) {
 			"key1": "value1",
 			"key2": "value2",
 		},
-		"filters.0.include.0": "com.databricks.include",
-		"continuous":          false,
+		"cluster": []any{
+			map[string]any{
+				"apply_policy_default_values": false,
+				"autoscale": []any{},
+				"aws_attributes": []any{},
+				"azure_attributes": []any{},
+				"cluster_log_conf": []any{},
+				"driver_instance_pool_id": "",
+				"driver_node_type_id": "",
+				"enable_local_disk_encryption": false,
+				"gcp_attributes": []any{},
+				"init_scripts": []any{},
+				"instance_pool_id": "",
+				"node_type_id": "",
+				"num_workers": 0,
+				"policy_id": "",
+				"spark_conf": map[string]any{},
+				"spark_env_vars": map[string]any{},
+				"ssh_public_keys": []any{},
+				"label": "default",
+				"custom_tags": map[string]any{
+					"cluster_tag1": "cluster_value1",
+				},
+			},
+		},
+		"library": []any{
+			map[string]any{
+				"file":  []any{},
+				"maven": []any{},
+				"jar":   "",
+				"whl":   "",
+				"notebook": []any{
+					map[string]any{
+						"path": "/Test",
+					},
+				},
+			},
+		},
+		"filters": []any{
+			map[string]any{
+				"include": []any{"com.databricks.include"},
+				"exclude": []any{"com.databricks.exclude"},
+			},
+		},
+		"deployment": []any{
+			map[string]any{
+				"kind":               "BUNDLE",
+				"metadata_file_path": "/foo/bar",
+			},
+		},
+		"edition":    "ADVANCED",
+		"channel":    "CURRENT",
+		"continuous": false,
 	})
 }
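
The rewritten `TestResourcePipelineRead` expectations assert whole nested blocks instead of individual flattened keys such as `filters.0.include.0`. In the expected data, each Terraform block is a `[]any` of `map[string]any`, and unset attributes surface as zero values (`""`, `[]any{}`, `false`, `0`). A small sketch of that shape, independent of the test harness:

```go
package main

import "fmt"

func main() {
	// Nested blocks flatten to a []any of map[string]any, mirroring the
	// "filters" expectation in the test above.
	filters := []any{
		map[string]any{
			"include": []any{"com.databricks.include"},
			"exclude": []any{"com.databricks.exclude"},
		},
	}
	block := filters[0].(map[string]any)
	fmt.Println(block["include"], block["exclude"])
	// Output: [com.databricks.include] [com.databricks.exclude]
}
```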
