Commit ea3acc0

Add file library type to databricks_pipeline resource (#1975)
* Add `file` library type to `databricks_pipeline` resource
* Handle references for file paths in the exporter as well
1 parent f3d05e2 commit ea3acc0

4 files changed: +25 -2 lines changed


docs/resources/pipeline.md

Lines changed: 11 additions & 1 deletion
````diff
@@ -12,6 +12,10 @@ resource "databricks_notebook" "dlt_demo" {
   #...
 }
 
+resource "databricks_repo" "dlt_demo" {
+  #...
+}
+
 resource "databricks_pipeline" "this" {
   name = "Pipeline Name"
   storage = "/test/first-pipeline"
@@ -42,6 +46,12 @@ resource "databricks_pipeline" "this" {
     }
   }
 
+  library {
+    file {
+      path = "${databricks_repo.dlt_demo.path}/pipeline.sql"
+    }
+  }
+
   continuous = false
 }
 ```
@@ -53,7 +63,7 @@ The following arguments are supported:
 * `name` - A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
 * `storage` - A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location. *Change of this parameter forces recreation of the pipeline.*
 * `configuration` - An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
-* `library` blocks - Specifies pipeline code and required artifacts. Syntax resembles [library](cluster.md#library-configuration-block) configuration block with the addition of a special `notebook` type of library that should have the `path` attribute. *Right now only the `notebook` type is supported.*
+* `library` blocks - Specifies pipeline code and required artifacts. Syntax resembles the [library](cluster.md#library-configuration-block) configuration block with the addition of the special `notebook` & `file` library types that should have the `path` attribute. *Right now only the `notebook` & `file` types are supported.*
 * `cluster` blocks - [Clusters](cluster.md) to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline. *Please note that DLT pipeline clusters support only a subset of attributes, as described in the [documentation](https://docs.databricks.com/data-engineering/delta-live-tables/delta-live-tables-api-guide.html#pipelinesnewcluster).* Also, note that the `autoscale` block is extended with the `mode` parameter that controls the autoscaling algorithm (possible values are `ENHANCED` for the new, enhanced autoscaling algorithm, or `LEGACY` for the old algorithm).
 * `continuous` - A flag indicating whether to run the pipeline continuously. The default value is `false`.
 * `development` - A flag indicating whether to run the pipeline in development mode. The default value is `true`.
````

exporter/importables.go

Lines changed: 5 additions & 0 deletions
```diff
@@ -1432,6 +1432,9 @@ var resourcesMap map[string]importable = map[string]importable{
 			if lib.Notebook != nil {
 				ic.emitNotebookOrRepo(lib.Notebook.Path)
 			}
+			if lib.File != nil {
+				ic.emitNotebookOrRepo(lib.File.Path)
+			}
 			ic.emitIfDbfsFile(lib.Jar)
 			ic.emitIfDbfsFile(lib.Whl)
 		}
@@ -1489,6 +1492,8 @@ var resourcesMap map[string]importable = map[string]importable{
 			{Path: "cluster.driver_instance_pool_id", Resource: "databricks_instance_pool"},
 			{Path: "library.notebook.path", Resource: "databricks_notebook"},
 			{Path: "library.notebook.path", Resource: "databricks_repo", Match: "path", MatchType: MatchPrefix},
+			{Path: "library.file.path", Resource: "databricks_notebook"},
+			{Path: "library.file.path", Resource: "databricks_repo", Match: "path", MatchType: MatchPrefix},
 			{Path: "library.jar", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
 			{Path: "library.whl", Resource: "databricks_dbfs_file", Match: "dbfs_path"},
 		},
```
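The two new reference entries mirror the existing `notebook` handling: when exporting, a `library.file.path` value that points inside a Repo is matched by prefix against a `databricks_repo` path and rewritten as an interpolation, so the generated configuration references the repo resource instead of a hard-coded workspace path. A minimal sketch of that prefix-matching idea is shown below; the helper name `rewriteAsRepoReference` and the sample paths are hypothetical and not the provider's actual implementation.

```go
package main

import (
	"fmt"
	"strings"
)

// rewriteAsRepoReference illustrates how a prefix match could turn an
// absolute workspace path into a Terraform interpolation against a
// databricks_repo resource. Returns false if the path is outside the repo.
func rewriteAsRepoReference(filePath, repoPath, repoResourceName string) (string, bool) {
	if !strings.HasPrefix(filePath, repoPath) {
		return "", false // not inside this repo; keep the literal path
	}
	suffix := strings.TrimPrefix(filePath, repoPath)
	return fmt.Sprintf("${databricks_repo.%s.path}%s", repoResourceName, suffix), true
}

func main() {
	ref, ok := rewriteAsRepoReference(
		"/Repos/user@example.com/repo/pipeline.sql",
		"/Repos/user@example.com/repo",
		"dlt_demo",
	)
	fmt.Println(ok, ref) // true ${databricks_repo.dlt_demo.path}/pipeline.sql
}
```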

exporter/test-data/get-dlt-pipeline.json

Lines changed: 1 addition & 1 deletion
```diff
@@ -42,7 +42,7 @@
       }
     },
     {
-      "notebook": {
+      "file": {
         "path": "/Repos/[email protected]/repo/Test DLT"
       }
     }
```

pipelines/resource_pipeline.go

Lines changed: 8 additions & 0 deletions
```diff
@@ -63,11 +63,16 @@ type NotebookLibrary struct {
 	Path string `json:"path"`
 }
 
+type FileLibrary struct {
+	Path string `json:"path"`
+}
+
 type PipelineLibrary struct {
 	Jar      string           `json:"jar,omitempty"`
 	Maven    *libraries.Maven `json:"maven,omitempty"`
 	Whl      string           `json:"whl,omitempty"`
 	Notebook *NotebookLibrary `json:"notebook,omitempty"`
+	File     *FileLibrary     `json:"file,omitempty"`
 }
 
 type filters struct {
@@ -79,6 +84,7 @@ type PipelineSpec struct {
 	ID            string            `json:"id,omitempty" tf:"computed"`
 	Name          string            `json:"name,omitempty"`
 	Storage       string            `json:"storage,omitempty" tf:"force_new"`
+	Catalog       string            `json:"catalog,omitempty" tf:"force_new"`
 	Configuration map[string]string `json:"configuration,omitempty"`
 	Clusters      []pipelineCluster `json:"clusters,omitempty" tf:"alias:cluster"`
 	Libraries     []PipelineLibrary `json:"libraries,omitempty" tf:"slice_set,alias:library"`
@@ -307,6 +313,8 @@ func adjustPipelineResourceSchema(m map[string]*schema.Schema) map[string]*schema.Schema {
 	m["edition"].ValidateFunc = validation.StringInSlice([]string{"pro", "core", "advanced"}, true)
 
 	m["storage"].DiffSuppressFunc = suppressStorageDiff
+	m["storage"].ConflictsWith = []string{"catalog"}
+	m["catalog"].ConflictsWith = []string{"storage"}
 
 	return m
 }
```
