Commit 8260d90

Updated databricks_pipeline resource to match latest APIs (#1368)
1 parent a121b39 · commit 8260d90

File tree

* docs/resources/pipeline.md
* pipelines/resource_pipeline.go
* pipelines/resource_pipeline_test.go

3 files changed: +71 -42 lines changed


docs/resources/pipeline.md

Lines changed: 8 additions & 9 deletions
````diff
@@ -42,26 +42,25 @@ resource "databricks_pipeline" "this" {
     }
   }
 
-  filters {
-    include = ["com.databricks.include"]
-    exclude = ["com.databricks.exclude"]
-  }
-
   continuous = false
 }
 ```
 
 ## Argument Reference
 
-The following arguments are required:
+The following arguments are supported:
 
 * `name` - A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
-* `storage` - A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location.
+* `storage` - A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. *Change of this parameter forces recreation of the pipeline.*
 * `configuration` - An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
-* `library` blocks - Specifies pipeline code and required artifacts. Syntax resembles [library](cluster.md#library-configuration-block) configuration block with the addition of a special `notebook` type of library that should have the `path` attribute.
-* `cluster` blocks - [Clusters](cluster.md) to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline.
+* `library` blocks - Specifies pipeline code and required artifacts. Syntax resembles [library](cluster.md#library-configuration-block) configuration block with the addition of a special `notebook` type of library that should have the `path` attribute. *Right now only the `notebook` type is supported.*
+* `cluster` blocks - [Clusters](cluster.md) to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. *Please note that DLT pipeline clusters support only a subset of attributes, as described in the [documentation](https://docs.databricks.com/data-engineering/delta-live-tables/delta-live-tables-api-guide.html#pipelinesnewcluster).*
 * `continuous` - A flag indicating whether to run the pipeline continuously. The default value is `false`.
+* `development` - A flag indicating whether to run the pipeline in development mode. The default value is `false`.
+* `photon` - A flag indicating whether to use the Photon engine. The default value is `false`.
 * `target` - The name of a database for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
+* `edition` - Optional name of the [product edition](https://docs.databricks.com/data-engineering/delta-live-tables/delta-live-tables-concepts.html#editions). Supported values are: `core`, `pro`, `advanced` (default).
+* `channel` - Optional name of the release channel for the Spark version used by the DLT pipeline. Supported values are: `current` (default) and `preview`.
 
 ## Import
 
````
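
For reference, the full set of arguments above could be exercised in a configuration like the following. This is a minimal sketch rather than an example taken from the commit: the resource name, notebook path, target database, and configuration keys are hypothetical.

```hcl
resource "databricks_pipeline" "example" {
  name    = "Example DLT pipeline" # hypothetical name
  storage = "/test/storage"        # changing this forces recreation of the pipeline
  target  = "example_db"           # hypothetical database for pipeline output

  configuration = {
    "some.key" = "some-value" # arbitrary key:value settings applied to the whole pipeline
  }

  library {
    notebook {
      path = "/Test" # only the notebook library type is supported right now
    }
  }

  continuous  = false
  development = true       # defaults to false
  photon      = false      # defaults to false
  edition     = "advanced" # core, pro, or advanced (default)
  channel     = "current"  # current (default) or preview
}
```
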
pipelines/resource_pipeline.go

Lines changed: 22 additions & 7 deletions
````diff
@@ -8,6 +8,7 @@ import (
 
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
 	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation"
 
 	"github.com/databrickslabs/terraform-provider-databricks/clusters"
 	"github.com/databrickslabs/terraform-provider-databricks/common"
@@ -27,10 +28,12 @@ type pipelineCluster struct {
 	NumWorkers int32               `json:"num_workers,omitempty" tf:"group:size"`
 	Autoscale  *clusters.AutoScale `json:"autoscale,omitempty" tf:"group:size"`
 
-	NodeTypeID       string                  `json:"node_type_id,omitempty" tf:"group:node_type,computed"`
-	DriverNodeTypeID string                  `json:"driver_node_type_id,omitempty" tf:"conflicts:instance_pool_id,computed"`
-	InstancePoolID   string                  `json:"instance_pool_id,omitempty" tf:"group:node_type"`
-	AwsAttributes    *clusters.AwsAttributes `json:"aws_attributes,omitempty" tf:"conflicts:instance_pool_id"`
+	NodeTypeID           string                  `json:"node_type_id,omitempty" tf:"group:node_type,computed"`
+	DriverNodeTypeID     string                  `json:"driver_node_type_id,omitempty" tf:"computed"`
+	InstancePoolID       string                  `json:"instance_pool_id,omitempty" tf:"group:node_type"`
+	DriverInstancePoolID string                  `json:"driver_instance_pool_id,omitempty"`
+	AwsAttributes        *clusters.AwsAttributes `json:"aws_attributes,omitempty" tf:"suppress_diff"`
+	GcpAttributes        *clusters.GcpAttributes `json:"gcp_attributes,omitempty" tf:"suppress_diff"`
 
 	SparkConf    map[string]string `json:"spark_conf,omitempty"`
 	SparkEnvVars map[string]string `json:"spark_env_vars,omitempty"`
@@ -60,14 +63,18 @@ type filters struct {
 type pipelineSpec struct {
 	ID                  string            `json:"id,omitempty" tf:"computed"`
 	Name                string            `json:"name,omitempty"`
-	Storage             string            `json:"storage,omitempty"`
+	Storage             string            `json:"storage,omitempty" tf:"force_new"`
 	Configuration       map[string]string `json:"configuration,omitempty"`
 	Clusters            []pipelineCluster `json:"clusters,omitempty" tf:"slice_set,alias:cluster"`
 	Libraries           []pipelineLibrary `json:"libraries,omitempty" tf:"slice_set,alias:library"`
-	Filters             *filters          `json:"filters"`
+	Filters             *filters          `json:"filters,omitempty"`
 	Continuous          bool              `json:"continuous,omitempty"`
+	Development         bool              `json:"development,omitempty"`
 	AllowDuplicateNames bool              `json:"allow_duplicate_names,omitempty"`
 	Target              string            `json:"target,omitempty"`
+	Photon              bool              `json:"photon,omitempty"`
+	Edition             string            `json:"edition,omitempty" tf:"suppress_diff,default:advanced"`
+	Channel             string            `json:"channel,omitempty" tf:"suppress_diff,default:current"`
 }
 
 type createPipelineResponse struct {
@@ -206,18 +213,26 @@ func adjustPipelineResourceSchema(m map[string]*schema.Schema) map[string]*schema.Schema {
 
 	awsAttributes, _ := clustersSchema["aws_attributes"].Elem.(*schema.Resource)
 	awsAttributesSchema := awsAttributes.Schema
-	delete(awsAttributesSchema, "first_on_demand")
 	delete(awsAttributesSchema, "availability")
 	delete(awsAttributesSchema, "spot_bid_price_percent")
 	delete(awsAttributesSchema, "ebs_volume_type")
 	delete(awsAttributesSchema, "ebs_volume_count")
 	delete(awsAttributesSchema, "ebs_volume_size")
 
+	gcpAttributes, _ := clustersSchema["gcp_attributes"].Elem.(*schema.Resource)
+	gcpAttributesSchema := gcpAttributes.Schema
+	delete(gcpAttributesSchema, "use_preemptible_executors")
+	delete(gcpAttributesSchema, "availability")
+	delete(gcpAttributesSchema, "boot_disk_size")
+	delete(gcpAttributesSchema, "zone_id")
+
 	m["library"].MinItems = 1
 	m["url"] = &schema.Schema{
 		Type:     schema.TypeString,
 		Computed: true,
 	}
+	m["channel"].ValidateFunc = validation.StringInSlice([]string{"current", "preview"}, true)
+	m["edition"].ValidateFunc = validation.StringInSlice([]string{"pro", "core", "advanced"}, true)
 
 	return m
 }
````
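
One practical note on the two `ValidateFunc` lines added above: because the second argument to `validation.StringInSlice` is `true`, matching is case-insensitive, and values outside the allowed sets fail during `terraform plan` instead of surfacing as an API error. A hedged configuration-side sketch (resource name and values are hypothetical):

```hcl
resource "databricks_pipeline" "validated" {
  name    = "validator-demo"
  storage = "/test/storage"

  library {
    notebook {
      path = "/Test"
    }
  }

  edition = "PRO"     # accepted: core / pro / advanced, matched case-insensitively
  channel = "preview" # accepted: current / preview

  # edition = "enterprise" # would fail schema validation before any API call
}
```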

pipelines/resource_pipeline_test.go

Lines changed: 41 additions & 26 deletions
````diff
@@ -4,7 +4,6 @@ import (
 	"testing"
 
 	"github.com/databrickslabs/terraform-provider-databricks/common"
-	"github.com/databrickslabs/terraform-provider-databricks/libraries"
 
 	"github.com/databrickslabs/terraform-provider-databricks/qa"
 	"github.com/stretchr/testify/assert"
@@ -27,11 +26,8 @@ var basicPipelineSpec = pipelineSpec{
 	},
 	Libraries: []pipelineLibrary{
 		{
-			Jar: "dbfs:/pipelines/code/abcde.jar",
-		},
-		{
-			Maven: &libraries.Maven{
-				Coordinates: "com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.18",
+			Notebook: &notebookLibrary{
+				Path: "/Test",
 			},
 		},
 	},
@@ -97,11 +93,8 @@ func TestResourcePipelineCreate(t *testing.T) {
 			}
 		}
 		library {
-			jar = "dbfs:/pipelines/code/abcde.jar"
-		}
-		library {
-			maven {
-				coordinates = "com.microsoft.azure:azure-eventhubs-spark_2.12:2.3.18"
+			notebook {
+				path = "/Test"
 			}
 		}
 		filters {
@@ -132,7 +125,9 @@ func TestResourcePipelineCreate_Error(t *testing.T) {
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			jar = "jar"
+			notebook {
+				path = "/Test"
+			}
 		}
 		filters {
 			include = ["a"]
@@ -181,7 +176,9 @@ func TestResourcePipelineCreate_ErrorWhenWaitingFailedCleanup(t *testing.T) {
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			jar = "jar"
+			notebook {
+				path = "/Test"
+			}
 		}
 		filters {
 			include = ["a"]
@@ -230,7 +227,9 @@ func TestResourcePipelineCreate_ErrorWhenWaitingSuccessfulCleanup(t *testing.T) {
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			jar = "jar"
+			notebook {
+				path = "/Test"
+			}
 		}
 		filters {
 			include = ["a"]
@@ -316,14 +315,16 @@ func TestResourcePipelineUpdate(t *testing.T) {
 		Storage: "/test/storage",
 		Libraries: []pipelineLibrary{
 			{
-				Maven: &libraries.Maven{
-					Coordinates: "coordinates",
+				Notebook: &notebookLibrary{
+					Path: "/Test",
 				},
 			},
 		},
 		Filters: &filters{
 			Include: []string{"com.databricks.include"},
 		},
+		Channel: "current",
+		Edition: "advanced",
 	}
 	d, err := qa.ResourceFixture{
 		Fixtures: []qa.HTTPFixture{
@@ -355,13 +356,17 @@
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			maven {
-				coordinates = "coordinates"
+			notebook {
+				path = "/Test"
 			}
 		}
 		filters {
 			include = [ "com.databricks.include" ]
 		}`,
+		InstanceState: map[string]string{
+			"name":    "test",
+			"storage": "/test/storage",
+		},
 		Update: true,
 		ID:     "abcd",
 	}.Apply(t)
@@ -386,15 +391,19 @@ func TestResourcePipelineUpdate_Error(t *testing.T) {
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			maven {
-				coordinates = "coordinates"
+			notebook {
+				path = "/Test"
 			}
 		}
 		filters {
 			include = [ "com.databricks.include" ]
 		}`,
 		Update: true,
-		ID:     "abcd",
+		InstanceState: map[string]string{
+			"name":    "test",
+			"storage": "/test/storage",
+		},
+		ID: "abcd",
 	}.Apply(t)
 	qa.AssertErrorStartsWith(t, err, "Internal error happened")
 	assert.Equal(t, "abcd", d.Id())
@@ -408,14 +417,16 @@ func TestResourcePipelineUpdate_FailsAfterUpdate(t *testing.T) {
 		Storage: "/test/storage",
 		Libraries: []pipelineLibrary{
 			{
-				Maven: &libraries.Maven{
-					Coordinates: "coordinates",
+				Notebook: &notebookLibrary{
+					Path: "/Test",
 				},
 			},
 		},
 		Filters: &filters{
 			Include: []string{"com.databricks.include"},
 		},
+		Channel: "current",
+		Edition: "advanced",
 	}
 	d, err := qa.ResourceFixture{
 		Fixtures: []qa.HTTPFixture{
@@ -438,15 +449,19 @@ func TestResourcePipelineUpdate_FailsAfterUpdate(t *testing.T) {
 		HCL: `name = "test"
 		storage = "/test/storage"
 		library {
-			maven {
-				coordinates = "coordinates"
+			notebook {
+				path = "/Test"
 			}
 		}
 		filters {
 			include = [ "com.databricks.include" ]
 		}`,
 		Update: true,
-		ID:     "abcd",
+		InstanceState: map[string]string{
+			"name":    "test",
+			"storage": "/test/storage",
+		},
+		ID: "abcd",
 	}.Apply(t)
 	qa.AssertErrorStartsWith(t, err, "pipeline abcd has failed")
 	assert.Equal(t, "abcd", d.Id(), "Id should be the same as in reading")
````
