Skip to content

Commit 5254936

Browse files
authored
Added search by name for databricks_cluster data source (#1901)
* add search by name for `databricks_cluster` data source. Also expose `id` for `databricks_cluster` & `databricks_job` data sources so it's easier to use the results. This is a follow-up for #1885 * address review feedback
1 parent 7e11263 commit 5254936

File tree

6 files changed

+179
-17
lines changed

6 files changed

+179
-17
lines changed

clusters/data_cluster.go

Lines changed: 33 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,25 +2,52 @@ package clusters
22

33
import (
44
"context"
5+
"fmt"
56

67
"github.com/databricks/terraform-provider-databricks/common"
78
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
89
)
910

1011
func DataSourceCluster() *schema.Resource {
1112
type clusterData struct {
12-
ClusterId string `json:"cluster_id"`
13+
Id string `json:"id,omitempty" tf:"computed"`
14+
ClusterId string `json:"cluster_id,omitempty" tf:"computed"`
15+
Name string `json:"cluster_name,omitempty" tf:"computed"`
1316
ClusterInfo *ClusterInfo `json:"cluster_info,omitempty" tf:"computed"`
1417
}
1518
return common.DataResource(clusterData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
1619
data := e.(*clusterData)
1720
clusterAPI := NewClustersAPI(ctx, c)
18-
clusterInfo, err := clusterAPI.Get(data.ClusterId)
19-
20-
data.ClusterInfo = &clusterInfo
21-
if err != nil {
22-
return err
21+
if data.Name != "" {
22+
clusters, err := clusterAPI.List()
23+
if err != nil {
24+
return err
25+
}
26+
namedClusters := []ClusterInfo{}
27+
for _, clst := range clusters {
28+
cluster := clst
29+
if cluster.ClusterName == data.Name {
30+
namedClusters = append(namedClusters, cluster)
31+
}
32+
}
33+
if len(namedClusters) == 0 {
34+
return fmt.Errorf("there is no cluster with name '%s'", data.Name)
35+
}
36+
if len(namedClusters) > 1 {
37+
return fmt.Errorf("there is more than one cluster with name '%s'", data.Name)
38+
}
39+
data.ClusterInfo = &namedClusters[0]
40+
} else if data.ClusterId != "" {
41+
cls, err := clusterAPI.Get(data.ClusterId)
42+
if err != nil {
43+
return err
44+
}
45+
data.ClusterInfo = &cls
46+
} else {
47+
return fmt.Errorf("you need to specify either `cluster_name` or `cluster_id`")
2348
}
49+
data.Id = data.ClusterInfo.ClusterID
50+
data.ClusterId = data.ClusterInfo.ClusterID
2451

2552
return nil
2653
})

clusters/data_cluster_test.go

Lines changed: 122 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ import (
99
"github.com/stretchr/testify/require"
1010
)
1111

12-
func TestClusterData(t *testing.T) {
12+
func TestClusterDataByID(t *testing.T) {
1313
d, err := qa.ResourceFixture{
1414
Fixtures: []qa.HTTPFixture{
1515
{
@@ -47,6 +47,117 @@ func TestClusterData(t *testing.T) {
4747
}
4848
}
4949

50+
func TestClusterDataByName(t *testing.T) {
51+
d, err := qa.ResourceFixture{
52+
Fixtures: []qa.HTTPFixture{
53+
{
54+
Method: "GET",
55+
Resource: "/api/2.0/clusters/list",
56+
57+
Response: ClusterList{
58+
Clusters: []ClusterInfo{{
59+
ClusterID: "abc",
60+
NumWorkers: 100,
61+
ClusterName: "Shared Autoscaling",
62+
SparkVersion: "7.1-scala12",
63+
NodeTypeID: "i3.xlarge",
64+
AutoterminationMinutes: 15,
65+
State: ClusterStateRunning,
66+
AutoScale: &AutoScale{
67+
MaxWorkers: 4,
68+
},
69+
}},
70+
},
71+
},
72+
},
73+
Resource: DataSourceCluster(),
74+
HCL: `cluster_name = "Shared Autoscaling"`,
75+
Read: true,
76+
NonWritable: true,
77+
ID: "_",
78+
}.Apply(t)
79+
require.NoError(t, err, err)
80+
assert.Equal(t, 15, d.Get("cluster_info.0.autotermination_minutes"))
81+
assert.Equal(t, "Shared Autoscaling", d.Get("cluster_info.0.cluster_name"))
82+
assert.Equal(t, "i3.xlarge", d.Get("cluster_info.0.node_type_id"))
83+
assert.Equal(t, 4, d.Get("cluster_info.0.autoscale.0.max_workers"))
84+
assert.Equal(t, "RUNNING", d.Get("cluster_info.0.state"))
85+
86+
for k, v := range d.State().Attributes {
87+
fmt.Printf("assert.Equal(t, %#v, d.Get(%#v))\n", v, k)
88+
}
89+
}
90+
91+
func TestClusterDataByName_NotFound(t *testing.T) {
92+
qa.ResourceFixture{
93+
Fixtures: []qa.HTTPFixture{
94+
{
95+
Method: "GET",
96+
Resource: "/api/2.0/clusters/list",
97+
98+
Response: ClusterList{
99+
Clusters: []ClusterInfo{},
100+
},
101+
},
102+
},
103+
Resource: DataSourceCluster(),
104+
HCL: `cluster_name = "Unknown"`,
105+
Read: true,
106+
NonWritable: true,
107+
ID: "_",
108+
}.ExpectError(t, "there is no cluster with name 'Unknown'")
109+
}
110+
111+
func TestClusterDataByName_DuplicateNames(t *testing.T) {
112+
qa.ResourceFixture{
113+
Fixtures: []qa.HTTPFixture{
114+
{
115+
Method: "GET",
116+
Resource: "/api/2.0/clusters/list",
117+
118+
Response: ClusterList{
119+
Clusters: []ClusterInfo{
120+
{
121+
ClusterID: "abc",
122+
NumWorkers: 100,
123+
ClusterName: "Shared Autoscaling",
124+
SparkVersion: "7.1-scala12",
125+
NodeTypeID: "i3.xlarge",
126+
AutoterminationMinutes: 15,
127+
State: ClusterStateRunning,
128+
},
129+
{
130+
ClusterID: "def",
131+
NumWorkers: 100,
132+
ClusterName: "Shared Autoscaling",
133+
SparkVersion: "7.1-scala12",
134+
NodeTypeID: "i3.xlarge",
135+
AutoterminationMinutes: 15,
136+
State: ClusterStateRunning,
137+
},
138+
},
139+
},
140+
},
141+
},
142+
Resource: DataSourceCluster(),
143+
HCL: `cluster_name = "Shared Autoscaling"`,
144+
Read: true,
145+
NonWritable: true,
146+
ID: "_",
147+
}.ExpectError(t, "there is more than one cluster with name 'Shared Autoscaling'")
148+
}
149+
150+
func TestClusterDataByName_ListError(t *testing.T) {
151+
qa.ResourceFixture{
152+
Fixtures: qa.HTTPFailures,
153+
Resource: DataSourceCluster(),
154+
HCL: `cluster_name = "Unknown"`,
155+
Read: true,
156+
NonWritable: true,
157+
ID: "_",
158+
}.ExpectError(t, "I'm a teapot")
159+
}
160+
50161
func TestClusterData_Error(t *testing.T) {
51162
qa.ResourceFixture{
52163
Fixtures: qa.HTTPFailures,
@@ -57,3 +168,13 @@ func TestClusterData_Error(t *testing.T) {
57168
ID: "_",
58169
}.ExpectError(t, "I'm a teapot")
59170
}
171+
172+
func TestClusterData_ErrorNoParams(t *testing.T) {
173+
qa.ResourceFixture{
174+
Resource: DataSourceCluster(),
175+
Read: true,
176+
NonWritable: true,
177+
HCL: "",
178+
ID: "_",
179+
}.ExpectError(t, "you need to specify either `cluster_name` or `cluster_id`")
180+
}

docs/data-sources/cluster.md

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,14 @@ data "databricks_cluster" "all" {
2424

2525
## Argument Reference
2626

27-
* `cluster_id` - (Required) The id of the cluster
27+
* `cluster_id` - (Required if `cluster_name` isn't specified) The id of the cluster
28+
* `cluster_name` - (Required if `cluster_id` isn't specified) The exact name of the cluster to search
2829

2930
## Attribute Reference
3031

3132
This data source exports the following attributes:
3233

34+
* `id` - cluster ID
3335
* `cluster_info` block, consisting of following fields:
3436
* `cluster_name` - Cluster name, which doesn’t have to be unique.
3537
* `spark_version` - [Runtime version](https://docs.databricks.com/runtime/index.html) of the cluster.

docs/data-sources/job.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ data "databricks_job" "this" {
1616
job_name = "My job"
1717
}
1818
19-
output "cluster_id" {
19+
output "job_num_workers" {
2020
value = data.databricks_job.this.job_settings[0].settings[0].new_cluster[0].num_workers
2121
sensitive = false
2222
}
@@ -26,8 +26,9 @@ output "cluster_id" {
2626

2727
This data source exports the following attributes:
2828

29-
* `job_id` - the id of [databricks_job](../resources/job.md) if the resource was matched by name.
30-
* `job_name` - the job name of [databricks_job](../resources/job.md) if the resource was matched by id.
29+
30+
* `id` - the id of [databricks_job](../resources/job.md) if the resource was matched by name.
31+
* `name` - the job name of [databricks_job](../resources/job.md) if the resource was matched by id.
3132
* `job_settings` - the same fields as in [databricks_job](../resources/job.md).
3233

3334
## Related Resources

jobs/data_job.go

Lines changed: 12 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -10,15 +10,23 @@ import (
1010

1111
func DataSourceJob() *schema.Resource {
1212
type queryableJobData struct {
13-
Id string `json:"job_id,omitempty" tf:"computed"`
14-
Name string `json:"job_name,omitempty" tf:"computed"`
15-
Job *Job `json:"job_settings,omitempty" tf:"computed"`
13+
Id string `json:"id,omitempty" tf:"computed"`
14+
JobId string `json:"job_id,omitempty" tf:"computed"`
15+
Name string `json:"name,omitempty" tf:"computed"`
16+
JobName string `json:"job_name,omitempty" tf:"computed"`
17+
Job *Job `json:"job_settings,omitempty" tf:"computed"`
1618
}
1719
return common.DataResource(queryableJobData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error {
1820
data := e.(*queryableJobData)
1921
jobsAPI := NewJobsAPI(ctx, c)
2022
var list []Job
2123
var err error
24+
if data.Id == "" {
25+
data.Id = data.JobId
26+
}
27+
if data.Name == "" {
28+
data.Name = data.JobName
29+
}
2230
if data.Name != "" {
2331
// if name is provided, need to list all jobs by name
2432
list, err = jobsAPI.ListByName(data.Name, true)
@@ -43,6 +51,7 @@ func DataSourceJob() *schema.Resource {
4351
data.Job = &currentJob
4452
data.Name = currentJobName
4553
data.Id = currentJobId
54+
data.JobId = currentJobId
4655
return nil // break the loop after we found the job
4756
}
4857
}

jobs/data_job_test.go

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -76,9 +76,10 @@ func TestDataSourceQueryableJobMatchesId(t *testing.T) {
7676
New: true,
7777
NonWritable: true,
7878
HCL: `job_id = "234"`,
79-
ID: "_",
79+
ID: "234",
8080
}.ApplyAndExpectData(t, map[string]any{
8181
"job_id": "234",
82+
"id": "234",
8283
"job_settings.0.settings.0.name": "Second",
8384
})
8485
}
@@ -90,9 +91,10 @@ func TestDataSourceQueryableJobMatchesName(t *testing.T) {
9091
Read: true,
9192
NonWritable: true,
9293
HCL: `job_name = "First"`,
93-
ID: "_",
94+
ID: "123",
9495
}.ApplyAndExpectData(t, map[string]any{
9596
"job_id": "123",
97+
"id": "123",
9698
"job_settings.0.settings.0.name": "First",
9799
})
98100
}
@@ -132,7 +134,7 @@ func TestDataSourceQueryableJobNoMatchId(t *testing.T) {
132134
Resource: DataSourceJob(),
133135
Read: true,
134136
NonWritable: true,
135-
HCL: `job_id= "567"`,
137+
HCL: `id= "567"`,
136138
ID: "_",
137139
}.ExpectError(t, "Job 567 does not exist.")
138140
}

0 commit comments

Comments
 (0)