Skip to content

Commit 42dd50c

Browse files
authored
[FEATURE] Support Power BI task in Terraform (#4647)
## Changes <!-- Summary of your changes that are easy to understand --> - Support Power BI task in Terraform ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] `make test` run locally - [x] relevant change in `docs/` folder - [ ] covered with integration tests in `internal/acceptance` - [x] using Go SDK - [ ] using TF Plugin Framework
1 parent 18f8055 commit 42dd50c

File tree

4 files changed

+143
-0
lines changed

4 files changed

+143
-0
lines changed

NEXT_CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,8 @@
44

55
### New Features and Improvements
66

7+
* Add support for `power_bi_task` in jobs ([#4647](https://github.com/databricks/terraform-provider-databricks/pull/4647))
8+
79
### Bug Fixes
810

911
### Documentation

docs/resources/job.md

Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,7 @@ This block describes individual tasks:
122122
* `for_each_task`
123123
* `notebook_task`
124124
* `pipeline_task`
125+
* `power_bi_task`
125126
* `python_wheel_task`
126127
* `run_job_task`
127128
* `spark_jar_task`
@@ -200,6 +201,25 @@ You also need to include a `git_source` block to configure the repository that c
200201

201202
-> The following configuration blocks are only supported inside a `task` block
202203

204+
#### power_bi_task Configuration Block
205+
206+
The `power_bi_task` triggers a Power BI semantic model update.
207+
208+
* `tables` (Required) (Array) The tables to be exported to Power BI. Block consists of the following fields:
209+
* `storage_mode` (Required) The Power BI storage mode of the table
210+
* `catalog` (Required) The catalog name in Databricks
211+
* `schema` (Required) The schema name in Databricks
212+
* `name` (Optional) The table name in Databricks. If empty, all tables under the schema are selected.
213+
* `warehouse_id` (Required) The SQL warehouse ID to use as the Power BI data source
214+
* `power_bi_model` (Required) The semantic model to update. Block consists of the following fields:
215+
* `workspace_name` (Required) The name of the Power BI workspace of the model
216+
* `model_name` (Required) The name of the Power BI model
217+
* `storage_mode` (Required) The default storage mode of the Power BI model
218+
* `authentication_method` (Required) How the published Power BI model authenticates to Databricks
219+
* `overwrite_existing` (Optional) Whether to overwrite existing Power BI models. Default is false
220+
* `connection_resource_name` (Required) The resource name of the UC connection to authenticate from Databricks to Power BI
221+
* `refresh_after_update` (Optional) Whether the model should be refreshed after the update. Default is false
222+
203223
#### python_wheel_task Configuration Block
204224

205225
* `entry_point` - (Optional) Python function as entry point for the task

jobs/resource_job.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -145,6 +145,7 @@ type ForEachNestedTask struct {
145145
SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
146146
SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
147147
PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
148+
PowerBiTask *jobs.PowerBiTask `json:"power_bi_task,omitempty" tf:"group:task_type"`
148149
PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
149150
SqlTask *SqlTask `json:"sql_task,omitempty" tf:"group:task_type"`
150151
DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`
@@ -226,6 +227,7 @@ type JobTaskSettings struct {
226227
SparkPythonTask *SparkPythonTask `json:"spark_python_task,omitempty" tf:"group:task_type"`
227228
SparkSubmitTask *SparkSubmitTask `json:"spark_submit_task,omitempty" tf:"group:task_type"`
228229
PipelineTask *PipelineTask `json:"pipeline_task,omitempty" tf:"group:task_type"`
230+
PowerBiTask *jobs.PowerBiTask `json:"power_bi_task,omitempty" tf:"group:task_type"`
229231
PythonWheelTask *PythonWheelTask `json:"python_wheel_task,omitempty" tf:"group:task_type"`
230232
SqlTask *SqlTask `json:"sql_task,omitempty" tf:"group:task_type"`
231233
DbtTask *DbtTask `json:"dbt_task,omitempty" tf:"group:task_type"`

jobs/resource_job_test.go

Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,13 +8,15 @@ import (
88
"time"
99

1010
"github.com/databricks/databricks-sdk-go/apierr"
11+
"github.com/databricks/databricks-sdk-go/experimental/mocks"
1112
"github.com/databricks/databricks-sdk-go/service/compute"
1213
"github.com/databricks/databricks-sdk-go/service/jobs"
1314
"github.com/databricks/terraform-provider-databricks/clusters"
1415
"github.com/databricks/terraform-provider-databricks/common"
1516
"github.com/databricks/terraform-provider-databricks/qa"
1617

1718
"github.com/stretchr/testify/assert"
19+
"github.com/stretchr/testify/mock"
1820
"github.com/stretchr/testify/require"
1921
)
2022

@@ -621,6 +623,123 @@ func TestResourceJobCreate_ForEachTask(t *testing.T) {
621623
assert.NoError(t, err)
622624
assert.Equal(t, "789", d.Id())
623625
}
626+
func TestResourceJobCreate_PowerBiTask(t *testing.T) {
627+
d, err := qa.ResourceFixture{
628+
MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
629+
e := w.GetMockJobsAPI().EXPECT()
630+
e.Create(mock.Anything, jobs.CreateJob{
631+
Name: "power_bi_task_name",
632+
MaxConcurrentRuns: 1,
633+
Queue: &jobs.QueueSettings{
634+
Enabled: false,
635+
},
636+
Tasks: []jobs.Task{
637+
{
638+
TaskKey: "power_bi_task_key",
639+
PowerBiTask: &jobs.PowerBiTask{
640+
ConnectionResourceName: "test-connection",
641+
PowerBiModel: &jobs.PowerBiModel{
642+
AuthenticationMethod: jobs.AuthenticationMethodOauth,
643+
ModelName: "TestModel",
644+
OverwriteExisting: true,
645+
StorageMode: jobs.StorageModeDirectQuery,
646+
WorkspaceName: "TestWorkspace",
647+
},
648+
RefreshAfterUpdate: true,
649+
Tables: []jobs.PowerBiTable{
650+
{
651+
Catalog: "TestCatalog",
652+
Name: "TestTable1",
653+
Schema: "TestSchema",
654+
StorageMode: jobs.StorageModeDirectQuery,
655+
},
656+
{
657+
Catalog: "TestCatalog",
658+
Name: "TestTable2",
659+
Schema: "TestSchema",
660+
StorageMode: jobs.StorageModeDual,
661+
},
662+
},
663+
WarehouseId: "12345",
664+
},
665+
},
666+
},
667+
}).
668+
Return(&jobs.CreateResponse{
669+
JobId: 789,
670+
}, nil)
671+
e.GetByJobId(mock.Anything, int64(789)).Return(&jobs.Job{
672+
JobId: 789,
673+
Settings: &jobs.JobSettings{
674+
Name: "power_bi_task_name",
675+
Tasks: []jobs.Task{
676+
{
677+
TaskKey: "power_bi_task_key",
678+
PowerBiTask: &jobs.PowerBiTask{
679+
ConnectionResourceName: "test-connection",
680+
PowerBiModel: &jobs.PowerBiModel{
681+
AuthenticationMethod: jobs.AuthenticationMethodOauth,
682+
ModelName: "TestModel",
683+
OverwriteExisting: true,
684+
StorageMode: jobs.StorageModeDirectQuery,
685+
WorkspaceName: "TestWorkspace",
686+
},
687+
RefreshAfterUpdate: true,
688+
Tables: []jobs.PowerBiTable{
689+
{
690+
Catalog: "TestCatalog",
691+
Name: "TestTable1",
692+
Schema: "TestSchema",
693+
StorageMode: jobs.StorageModeDirectQuery,
694+
},
695+
{
696+
Catalog: "TestCatalog",
697+
Name: "TestTable2",
698+
Schema: "TestSchema",
699+
StorageMode: jobs.StorageModeDual,
700+
},
701+
},
702+
WarehouseId: "12345",
703+
},
704+
},
705+
},
706+
},
707+
}, nil)
708+
},
709+
Create: true,
710+
Resource: ResourceJob(),
711+
HCL: `name = "power_bi_task_name"
712+
task {
713+
task_key = "power_bi_task_key"
714+
power_bi_task {
715+
connection_resource_name = "test-connection"
716+
power_bi_model {
717+
authentication_method = "OAUTH"
718+
model_name = "TestModel"
719+
overwrite_existing = true
720+
storage_mode = "DIRECT_QUERY"
721+
workspace_name = "TestWorkspace"
722+
}
723+
refresh_after_update = true
724+
tables {
725+
catalog = "TestCatalog"
726+
name = "TestTable1"
727+
schema = "TestSchema"
728+
storage_mode = "DIRECT_QUERY"
729+
}
730+
tables {
731+
catalog = "TestCatalog"
732+
name = "TestTable2"
733+
schema = "TestSchema"
734+
storage_mode = "DUAL"
735+
}
736+
warehouse_id = "12345"
737+
}
738+
}`,
739+
}.Apply(t)
740+
assert.NoError(t, err)
741+
assert.Equal(t, "789", d.Id())
742+
}
624743
func TestResourceJobCreate_JobParameters(t *testing.T) {
625744
d, err := qa.ResourceFixture{
626745
Fixtures: []qa.HTTPFixture{

0 commit comments

Comments
 (0)