Skip to content

Commit 67dea81

Browse files
authored
Added databricks_jobs data resource (#1138)
Retrieves a list of databricks_job ids, that were created by Terraform or manually, so that special handling could be applied. ## Example Usage Granting view [databricks_permissions](../resources/permissions.md) to all [databricks_job](../resources/job.md) within the workspace: ```hcl data "databricks_jobs" "this" {} resource "databricks_permissions" "everyone_can_view_all_jobs" { for_each = data.databricks_jobs.this.ids job_id = each.value access_control { group_name = "users" permission_level = "CAN_VIEW" } } ``` Getting ID of specific [databricks_job](../resources/job.md) by name: ```hcl data "databricks_jobs" "this" {} output "x" { value = "ID of `x` job is ${data.databricks_jobs.this.ids["x"]}" sensitive = false } ```
1 parent 83a5ea5 commit 67dea81

File tree

10 files changed

+194
-2
lines changed

10 files changed

+194
-2
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
## 0.5.1
44

55
* Added an extended documentation from provisioning AWS PrivateLink workspace ([#1084](https://github.com/databrickslabs/terraform-provider-databricks/pull/1084)).
6+
* Added `databricks_jobs` data resource to get a map of all job names and their ids ([#1138](https://github.com/databrickslabs/terraform-provider-databricks/pull/1138)).
67

78
## 0.5.0
89

common/reflect_resource.go

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -171,6 +171,11 @@ func diffSuppressor(zero string) func(k, old, new string, d *schema.ResourceData
171171
func typeToSchema(v reflect.Value, t reflect.Type, path []string) map[string]*schema.Schema {
172172
scm := map[string]*schema.Schema{}
173173
rk := v.Kind()
174+
if rk == reflect.Ptr {
175+
v = v.Elem()
176+
t = v.Type()
177+
rk = v.Kind()
178+
}
174179
if rk != reflect.Struct {
175180
panic(fmt.Errorf("Schema value of Struct is expected, but got %s: %#v", reflectKind(rk), v))
176181
}
@@ -384,7 +389,11 @@ func isValueNilOrEmpty(valueField *reflect.Value, fieldPath string) bool {
384389

385390
// StructToData reads result using schema onto resource data
386391
func StructToData(result interface{}, s map[string]*schema.Schema, d *schema.ResourceData) error {
387-
return iterFields(reflect.ValueOf(result), []string{}, s, func(
392+
v := reflect.ValueOf(result)
393+
if v.Kind() == reflect.Ptr {
394+
v = v.Elem()
395+
}
396+
return iterFields(v, []string{}, s, func(
388397
fieldSchema *schema.Schema, path []string, valueField *reflect.Value) error {
389398
fieldValue := valueField.Interface()
390399
if fieldValue == nil {

common/reflect_resource_test.go

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package common
22

33
import (
4+
"context"
45
"fmt"
56
"reflect"
67
"testing"
@@ -579,3 +580,32 @@ func TestDataToReflectValueBypass(t *testing.T) {
579580
err := DataToReflectValue(nil, &schema.Resource{Schema: map[string]*schema.Schema{}}, reflect.ValueOf(0))
580581
assert.EqualError(t, err, "value of Struct is expected, but got Int: 0")
581582
}
583+
584+
func TestDataResource(t *testing.T) {
585+
r := func() *schema.Resource {
586+
var dto struct {
587+
In string `json:"in"`
588+
Out string `json:"out,omitempty" tf:"computed"`
589+
}
590+
return DataResource(&dto, func(ctx context.Context, c *DatabricksClient) error {
591+
dto.Out = "out: " + dto.In
592+
if dto.In == "fail" {
593+
return fmt.Errorf("happens")
594+
}
595+
return nil
596+
})
597+
}()
598+
d := r.TestResourceData()
599+
d.Set("in", "test")
600+
601+
diags := r.ReadContext(context.Background(), d, nil)
602+
assert.Len(t, diags, 1)
603+
604+
diags = r.ReadContext(context.Background(), d, &DatabricksClient{})
605+
assert.Len(t, diags, 0)
606+
assert.Equal(t, "out: test", d.Get("out"))
607+
608+
d.Set("in", "fail")
609+
diags = r.ReadContext(context.Background(), d, &DatabricksClient{})
610+
assert.Len(t, diags, 1)
611+
}

common/resource.go

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -167,3 +167,27 @@ func makeEmptyBlockSuppressFunc(name string) func(k, old, new string, d *schema.
167167
return false
168168
}
169169
}
170+
171+
func DataResource(e interface{}, read func(context.Context, *DatabricksClient) error) *schema.Resource {
172+
s := StructToSchema(e, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
173+
return &schema.Resource{
174+
Schema: s,
175+
ReadContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) {
176+
defer func() {
177+
// using recoverable() would cause more complex rewrapping of DataToStructPointer & StructToData
178+
if panic := recover(); panic != nil {
179+
diags = diag.Errorf("panic: %v", panic)
180+
}
181+
}()
182+
DataToStructPointer(d, s, e)
183+
err := read(ctx, m.(*DatabricksClient))
184+
if err != nil {
185+
err = nicerError(ctx, err, "read data")
186+
diags = diag.FromErr(err)
187+
}
188+
StructToData(e, s, d)
189+
d.SetId("_")
190+
return
191+
},
192+
}
193+
}

docs/data-sources/jobs.md

Lines changed: 51 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,51 @@
1+
---
2+
subcategory: "Compute"
3+
---
4+
# databricks_jobs Data Source
5+
6+
-> **Note** If you have a fully automated setup with workspaces created by [databricks_mws_workspaces](../resources/mws_workspaces.md) or [azurerm_databricks_workspace](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/databricks_workspace), please make sure to add [depends_on attribute](../index.md#data-resources-and-authentication-is-not-configured-errors) in order to prevent _authentication is not configured for provider_ errors.
7+
8+
Retrieves a list of [databricks_job](../resources/job.md) ids that were created either by Terraform or manually, so that special handling can be applied.
9+
10+
-> **Note** Data resource will error in case of jobs with duplicate names.
11+
12+
## Example Usage
13+
14+
Granting view [databricks_permissions](../resources/permissions.md) to all [databricks_job](../resources/job.md) within the workspace:
15+
16+
```hcl
17+
data "databricks_jobs" "this" {}
18+
19+
resource "databricks_permissions" "everyone_can_view_all_jobs" {
20+
for_each = data.databricks_jobs.this.ids
21+
job_id = each.value
22+
23+
access_control {
24+
group_name = "users"
25+
permission_level = "CAN_VIEW"
26+
}
27+
}
28+
```
29+
30+
Getting ID of specific [databricks_job](../resources/job.md) by name:
31+
32+
```hcl
33+
data "databricks_jobs" "this" {}
34+
35+
output "x" {
36+
value = "ID of `x` job is ${data.databricks_jobs.this.ids["x"]}"
37+
sensitive = false
38+
}
39+
```
40+
41+
## Attribute Reference
42+
43+
This data source exports the following attributes:
44+
45+
* `ids` - map of [databricks_job](../resources/job.md) names to ids
46+
47+
## Related Resources
48+
49+
The following resources are used in the same context:
50+
51+
* [databricks_job](../resources/job.md) to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a [databricks_cluster](../resources/cluster.md).

docs/resources/job.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -221,6 +221,7 @@ The following resources are often used in the same context:
221221
* [databricks_global_init_script](global_init_script.md) to manage [global init scripts](https://docs.databricks.com/clusters/init-scripts.html#global-init-scripts), which are run on all [databricks_cluster](cluster.md#init_scripts) and [databricks_job](job.md#new_cluster).
222222
* [databricks_instance_pool](instance_pool.md) to manage [instance pools](https://docs.databricks.com/clusters/instance-pools/index.html) to reduce [cluster](cluster.md) start and auto-scaling times by maintaining a set of idle, ready-to-use instances.
223223
* [databricks_instance_profile](instance_profile.md) to manage AWS EC2 instance profiles that users can launch [databricks_cluster](cluster.md) and access data, like [databricks_mount](mount.md).
224+
* [databricks_jobs](../data-sources/jobs.md) data to get all jobs and their names from a workspace.
224225
* [databricks_library](library.md) to install a [library](https://docs.databricks.com/libraries/index.html) on [databricks_cluster](cluster.md).
225226
* [databricks_node_type](../data-sources/node_type.md) data to get the smallest node type for [databricks_cluster](cluster.md) that fits search criteria, like amount of RAM or number of cores.
226227
* [databricks_notebook](notebook.md) to manage [Databricks Notebooks](https://docs.databricks.com/notebooks/index.html).

jobs/data_jobs.go

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
package jobs
2+
3+
import (
4+
"context"
5+
"fmt"
6+
7+
"github.com/databrickslabs/terraform-provider-databricks/common"
8+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
9+
)
10+
11+
func DataSourceJobs() *schema.Resource {
12+
var response struct {
13+
Ids map[string]string `json:"ids,omitempty" tf:"computed"`
14+
}
15+
return common.DataResource(&response, func(ctx context.Context, c *common.DatabricksClient) error {
16+
jobsAPI := NewJobsAPI(ctx, c)
17+
list, err := jobsAPI.List()
18+
if err != nil {
19+
return err
20+
}
21+
response.Ids = map[string]string{}
22+
for _, v := range list.Jobs {
23+
name := v.Settings.Name
24+
_, duplicateName := response.Ids[name]
25+
if duplicateName {
26+
return fmt.Errorf("duplicate job name detected: %s", name)
27+
}
28+
response.Ids[name] = v.ID()
29+
}
30+
return nil
31+
})
32+
}

jobs/data_jobs_test.go

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
package jobs
2+
3+
import (
4+
"testing"
5+
6+
"github.com/databrickslabs/terraform-provider-databricks/qa"
7+
)
8+
9+
func TestJobsData(t *testing.T) {
10+
qa.ResourceFixture{
11+
Fixtures: []qa.HTTPFixture{
12+
{
13+
Method: "GET",
14+
Resource: "/api/2.0/jobs/list",
15+
Response: JobList{
16+
Jobs: []Job{
17+
{
18+
JobID: 123,
19+
Settings: &JobSettings{
20+
Name: "First",
21+
},
22+
},
23+
{
24+
JobID: 234,
25+
Settings: &JobSettings{
26+
Name: "Second",
27+
},
28+
},
29+
},
30+
},
31+
},
32+
},
33+
Resource: DataSourceJobs(),
34+
Read: true,
35+
NonWritable: true,
36+
ID: "_",
37+
}.ApplyAndExpectData(t, map[string]interface{}{
38+
"ids": map[string]interface{}{
39+
"First": "123",
40+
"Second": "234",
41+
},
42+
})
43+
}

provider/provider.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ func DatabricksProvider() *schema.Provider {
4444
"databricks_dbfs_file": storage.DataSourceDBFSFile(),
4545
"databricks_dbfs_file_paths": storage.DataSourceDBFSFilePaths(),
4646
"databricks_group": scim.DataSourceGroup(),
47+
"databricks_jobs": jobs.DataSourceJobs(),
4748
"databricks_node_type": clusters.DataSourceNodeType(),
4849
"databricks_notebook": workspace.DataSourceNotebook(),
4950
"databricks_notebook_paths": workspace.DataSourceNotebookPaths(),

qa/testing.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -261,7 +261,7 @@ func (f ResourceFixture) ApplyNoError(t *testing.T) {
261261
// ApplyAndExpectData is a convenience method for tests that doesn't expect error, but want to check data
262262
func (f ResourceFixture) ApplyAndExpectData(t *testing.T, data map[string]interface{}) {
263263
d, err := f.Apply(t)
264-
assert.NoError(t, err, err)
264+
require.NoError(t, err, err)
265265
for k, expected := range data {
266266
if k == "id" {
267267
assert.Equal(t, expected, d.Id())

0 commit comments

Comments
 (0)