Commit 4e6b4ae

[Feature] Add key argument to databricks_jobs data source to support duplicate job names (#4796)
## Changes

Added support for handling jobs with duplicate names in the `databricks_jobs` data source by introducing a new `key` parameter that allows users to choose between using job names (default) or job IDs as the mapping keys. This maintains backward compatibility while providing flexibility for workspaces with duplicate job names.

Key changes:

- Added `key` parameter with options "name" (default) or "id"
- When `key = "id"`, the data source uses job IDs as keys, allowing duplicate job names
- When `key = "name"` (default), maintains existing behavior of erroring on duplicate names
- Added test coverage for new functionality

Close #4266

## Tests

- [X] `make test` run locally
- [X] relevant change in `docs/` folder

---------

Co-authored-by: Zach King <[email protected]>
1 parent 90f48d4 commit 4e6b4ae

4 files changed: +131, -10 lines changed
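For quick reference before the per-file diffs, here is a minimal configuration exercising the new argument. It mirrors the example added to `docs/data-sources/jobs.md` in this commit; the `all_job_ids` output name is illustrative and not part of the change.

```hcl
# Map jobs by ID so that duplicate job names do not cause an error.
data "databricks_jobs" "this" {
  key = "id"
}

# With key = "id", the returned map uses job IDs as both keys and values (see the data source tests).
output "all_job_ids" {
  value = data.databricks_jobs.this.ids
}
```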

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
```diff
@@ -5,6 +5,7 @@
 ### Breaking Changes
 
 ### New Features and Improvements
+* Added `key` argument to `databricks_jobs` data source to enable mapping by job ID and allow duplicate job names ([#4796](https://github.com/databricks/terraform-provider-databricks/pull/4796)).
 
 ### Bug Fixes
```

docs/data-sources/jobs.md

Lines changed: 23 additions & 6 deletions
````diff
@@ -7,7 +7,7 @@ Retrieves a list of [databricks_job](../resources/job.md) ids, that were created
 
 -> This data source can only be used with a workspace-level provider!
 
-~> Data resource will error in case of jobs with duplicate names.
+~> By default, this data resource will error in case of jobs with duplicate names. To support duplicate names, set `key = "id"` to map jobs by ID.
 
 ## Example Usage
 
@@ -16,10 +16,6 @@ Granting view [databricks_permissions](../resources/permissions.md) to all [data
 ```hcl
 data "databricks_jobs" "this" {}
 
-data "databricks_jobs" "tests" {
-  job_name_contains = "test"
-}
-
 resource "databricks_permissions" "everyone_can_view_all_jobs" {
   for_each = data.databricks_jobs.this.ids
   job_id   = each.value
@@ -34,17 +30,38 @@ resource "databricks_permissions" "everyone_can_view_all_jobs" {
 Getting ID of specific [databricks_job](../resources/job.md) by name:
 
 ```hcl
-data "databricks_jobs" "this" {}
+data "databricks_jobs" "this" {
+  job_name_contains = "test"
+}
 
 output "x" {
   value     = "ID of `x` job is ${data.databricks_jobs.this.ids["x"]}"
   sensitive = false
 }
 ```
 
+Getting IDs of [databricks_job](../resources/job.md) mapped by ID, allowing duplicate job names:
+
+```hcl
+data "databricks_jobs" "this" {
+  key = "id"
+}
+
+resource "databricks_permissions" "everyone_can_view_all_jobs" {
+  for_each = data.databricks_jobs.this.ids
+  job_id   = each.value
+
+  access_control {
+    group_name       = "users"
+    permission_level = "CAN_VIEW"
+  }
+}
+```
+
 ## Argument Reference
 
 * `job_name_contains` - (Optional) Only return [databricks_job](../resources/job.md#) ids that match the given name string (case-insensitive).
+* `key` - (Optional) Attribute to use for keys in the returned map of [databricks_job](../resources/job.md#) ids. Possible values are `name` (default) or `id`. Setting to `id` uses the job ID as the map key, allowing duplicate job names.
 
 ## Attribute Reference
````

jobs/data_jobs.go

Lines changed: 22 additions & 4 deletions
```diff
@@ -11,14 +11,21 @@ import (
 	"github.com/databricks/terraform-provider-databricks/common"
 )
 
+const (
+	JobsGroupByName = "name"
+	JobsGroupByID   = "id"
+)
+
 func DataSourceJobs() common.Resource {
 	return common.WorkspaceData(func(ctx context.Context, data *struct {
 		Ids        map[string]string `json:"ids,omitempty" tf:"computed"`
 		NameFilter string            `json:"job_name_contains,omitempty"`
+		Key        string            `json:"key,omitempty" tf:"default:name"`
 	}, w *databricks.WorkspaceClient) error {
 		iter := w.Jobs.List(ctx, jobs.ListJobsRequest{ExpandTasks: false, Limit: 100})
 		data.Ids = map[string]string{}
 		nameFilter := strings.ToLower(data.NameFilter)
+		keyAttribute := strings.ToLower(data.Key)
 		for iter.HasNext(ctx) {
 			job, err := iter.Next(ctx)
 			if err != nil {
@@ -28,11 +35,22 @@ func DataSourceJobs() common.Resource {
 			if nameFilter != "" && !strings.Contains(strings.ToLower(name), nameFilter) {
 				continue
 			}
-			_, duplicateName := data.Ids[name]
-			if duplicateName {
-				return fmt.Errorf("duplicate job name detected: %s", name)
+			jobId := strconv.FormatInt(job.JobId, 10)
+
+			key := name
+			if strings.EqualFold(keyAttribute, JobsGroupByName) {
+				key = name
+			} else if strings.EqualFold(keyAttribute, JobsGroupByID) {
+				key = jobId
+			} else {
+				return fmt.Errorf("unsupported key %s, must be one of %s or %s", keyAttribute, JobsGroupByName, JobsGroupByID)
+			}
+
+			_, duplicateKey := data.Ids[key]
+			if duplicateKey {
+				return fmt.Errorf("duplicate job %s detected: %s", keyAttribute, key)
 			}
-			data.Ids[name] = strconv.FormatInt(job.JobId, 10)
+			data.Ids[key] = jobId
 		}
 		return nil
 	})
```

jobs/data_jobs_test.go

Lines changed: 85 additions & 0 deletions
```diff
@@ -75,3 +75,88 @@ func TestJobsDataWithFilter(t *testing.T) {
 		},
 	})
 }
+
+func TestJobsDataWithKeyID(t *testing.T) {
+	qa.ResourceFixture{
+		MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
+			iterator := listing.SliceIterator[jobs.BaseJob]{
+				{
+					JobId: 123,
+					Settings: &jobs.JobSettings{
+						Name: "First",
+					},
+				},
+				{
+					JobId: 234,
+					Settings: &jobs.JobSettings{
+						Name: "Second",
+					},
+				},
+			}
+			w.GetMockJobsAPI().EXPECT().
+				List(mock.Anything, jobs.ListJobsRequest{ExpandTasks: false, Limit: 100}).
+				Return(&iterator)
+		},
+		Resource:    DataSourceJobs(),
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+		HCL:         `key = "id"`,
+	}.ApplyAndExpectData(t, map[string]any{
+		"ids": map[string]any{
+			"123": "123",
+			"234": "234",
+		},
+	})
+}
+
+func TestJobsDataWithDuplicateNames(t *testing.T) {
+	qa.ResourceFixture{
+		MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
+			iterator := listing.SliceIterator[jobs.BaseJob]{
+				{
+					JobId: 123,
+					Settings: &jobs.JobSettings{
+						Name: "Duplicate",
+					},
+				},
+				{
+					JobId: 234,
+					Settings: &jobs.JobSettings{
+						Name: "Duplicate",
+					},
+				},
+			}
+			w.GetMockJobsAPI().EXPECT().
+				List(mock.Anything, jobs.ListJobsRequest{ExpandTasks: false, Limit: 100}).
+				Return(&iterator)
+		},
+		Resource:    DataSourceJobs(),
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+	}.ExpectError(t, "duplicate job name detected: Duplicate")
+}
+
+func TestJobsDataWithInvalidKey(t *testing.T) {
+	qa.ResourceFixture{
+		MockWorkspaceClientFunc: func(w *mocks.MockWorkspaceClient) {
+			iterator := listing.SliceIterator[jobs.BaseJob]{
+				{
+					JobId: 123,
+					Settings: &jobs.JobSettings{
+						Name: "First",
+					},
+				},
+			}
+			w.GetMockJobsAPI().EXPECT().
+				List(mock.Anything, jobs.ListJobsRequest{ExpandTasks: false, Limit: 100}).
+				Return(&iterator)
+		},
+		Resource:    DataSourceJobs(),
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+		HCL:         `key = "invalid"`,
+	}.ExpectError(t, "unsupported key invalid, must be one of name or id")
+}
```
