Skip to content

Commit 0a81d78

Browse files
edmondopnfx
andauthored
Added databricks_job data resource (#1509)
Example: Getting the existing cluster id of specific databricks_job by name or by id: ``` data "databricks_job" "this" { job_name = "My job" } output "cluster_id" { value = data.databricks_job.job_settings.existing_cluster_id sensitive = false } ``` Co-authored-by: Serge Smertin <[email protected]>
1 parent 15a3aae commit 0a81d78

File tree

6 files changed

+227
-0
lines changed

6 files changed

+227
-0
lines changed

README.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -33,6 +33,7 @@
3333
| [databricks_instance_profile](docs/resources/instance_profile.md)
3434
| [databricks_ip_access_list](docs/resources/ip_access_list.md)
3535
| [databricks_job](docs/resources/job.md)
36+
| [databricks_job](docs/data-sources/job.md) data
3637
| [databricks_jobs](docs/data-sources/jobs.md)
3738
| [databricks_library](docs/resources/library.md)
3839
| [databricks_metastore](docs/resources/metastore.md)

docs/data-sources/job.md

Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
---
2+
subcategory: "Compute"
3+
---
4+
# databricks_job Data Source
5+
6+
-> **Note** If you have a fully automated setup with workspaces created by [databricks_mws_workspaces](../resources/mws_workspaces.md) or [azurerm_databricks_workspace](https://registry.terraform.io/providers/hashicorp/azurerm/latest/docs/resources/databricks_workspace), please make sure to add [depends_on attribute](../index.md#data-resources-and-authentication-is-not-configured-errors) in order to prevent _authentication is not configured for provider_ errors.
7+
8+
Retrieves the settings of [databricks_job](../resources/job.md) by name or by id. Complements the feature of the [databricks_jobs](jobs.md) data source.
9+
10+
## Example Usage
11+
12+
Getting the existing cluster id of specific [databricks_job](../resources/job.md) by name or by id:
13+
14+
```hcl
15+
data "databricks_job" "this" {
16+
job_name = "My job"
17+
}
18+
19+
output "cluster_id" {
20+
value = data.databricks_job.this.job_settings[0].settings[0].existing_cluster_id
21+
sensitive = false
22+
}
23+
```
24+
25+
## Attribute Reference
26+
27+
This data source exports the following attributes:
28+
29+
* `job_id` - the id of [databricks_job](../resources/job.md) if the resource was matched by name.
30+
* `job_name` - the job name of [databricks_job](../resources/job.md) if the resource was matched by id.
31+
* `job_settings` - the job settings of [databricks_job](../resources/job.md).
32+
33+
## Related Resources
34+
35+
The following resources are used in the same context:
36+
37+
* [databricks_jobs](jobs.md) data to get all jobs and their names from a workspace.
38+
* [databricks_job](../resources/job.md) to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a [databricks_cluster](../resources/cluster.md).

jobs/acceptance/data_job_test.go

Lines changed: 66 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,66 @@
1+
package acceptance
2+
3+
import (
4+
"github.com/databricks/terraform-provider-databricks/internal/acceptance"
5+
6+
"testing"
7+
)
8+
9+
func TestAccDataSourceQueryableJob(t *testing.T) {
10+
acceptance.Test(t, []acceptance.Step{
11+
{
12+
Template: `
13+
data "databricks_current_user" "me" {}
14+
data "databricks_spark_version" "latest" {}
15+
data "databricks_node_type" "smallest" {
16+
local_disk = true
17+
}
18+
19+
resource "databricks_notebook" "this" {
20+
path = "${data.databricks_current_user.me.home}/Terraform{var.RANDOM}"
21+
language = "PYTHON"
22+
content_base64 = base64encode(<<-EOT
23+
# created from ${abspath(path.module)}
24+
display(spark.range(10))
25+
EOT
26+
)
27+
}
28+
29+
resource "databricks_job" "this" {
30+
name = "job-datasource-acceptance-test"
31+
32+
job_cluster {
33+
job_cluster_key = "j"
34+
new_cluster {
35+
num_workers = 20
36+
spark_version = data.databricks_spark_version.latest.id
37+
node_type_id = data.databricks_node_type.smallest.id
38+
}
39+
}
40+
41+
task {
42+
task_key = "a"
43+
44+
new_cluster {
45+
num_workers = 1
46+
spark_version = data.databricks_spark_version.latest.id
47+
node_type_id = data.databricks_node_type.smallest.id
48+
}
49+
50+
notebook_task {
51+
notebook_path = databricks_notebook.this.path
52+
}
53+
}
54+
55+
}
56+
57+
data "databricks_job" "this" {
58+
job_name = databricks_job.this.name
59+
}
60+
61+
output "cluster_workers" {
62+
value = data.databricks_job.this.job_settings[0].settings[0].new_cluster[0].num_workers
63+
}`,
64+
},
65+
})
66+
}

jobs/data_job.go

Lines changed: 39 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
package jobs
2+
3+
import (
4+
"context"
5+
"fmt"
6+
"github.com/databricks/terraform-provider-databricks/common"
7+
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
8+
)
9+
10+
func DataSourceJob() *schema.Resource {
11+
type queryableJobData struct {
12+
Id string `json:"job_id,omitempty" tf:"computed"`
13+
Name string `json:"job_name,omitempty" tf:"computed"`
14+
Job *Job `json:"job_settings,omitempty" tf:"computed"`
15+
}
16+
return common.DataResource(queryableJobData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error {
17+
data := e.(*queryableJobData)
18+
jobsAPI := NewJobsAPI(ctx, c)
19+
list, err := jobsAPI.List()
20+
if err != nil {
21+
return err
22+
}
23+
for _, job := range list.Jobs {
24+
currentJob := job // De-referencing the temp variable used by the loop
25+
currentJobId := currentJob.ID()
26+
currentJobName := currentJob.Settings.Name
27+
if currentJobName == data.Name || currentJobId == data.Id {
28+
data.Job = &currentJob
29+
data.Name = currentJobName
30+
data.Id = currentJobId
31+
return nil // break the loop after we found the job
32+
}
33+
}
34+
if data.Job == nil {
35+
return fmt.Errorf("no job found with specified name or id")
36+
}
37+
return nil
38+
})
39+
}

jobs/data_job_test.go

Lines changed: 82 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,82 @@
1+
package jobs
2+
3+
import (
4+
"github.com/databricks/terraform-provider-databricks/qa"
5+
"testing"
6+
)
7+
8+
func commonFixtures() []qa.HTTPFixture {
9+
return []qa.HTTPFixture{
10+
{
11+
Method: "GET",
12+
Resource: "/api/2.0/jobs/list",
13+
Response: JobList{
14+
Jobs: []Job{
15+
{
16+
JobID: 123,
17+
Settings: &JobSettings{
18+
Name: "First",
19+
},
20+
},
21+
{
22+
JobID: 234,
23+
Settings: &JobSettings{
24+
Name: "Second",
25+
},
26+
},
27+
},
28+
},
29+
},
30+
}
31+
32+
}
33+
func TestDataSourceQueryableJobMatchesId(t *testing.T) {
34+
qa.ResourceFixture{
35+
Fixtures: commonFixtures(),
36+
Resource: DataSourceJob(),
37+
Read: true,
38+
New: true,
39+
NonWritable: true,
40+
HCL: `job_id = "234"`,
41+
ID: "_",
42+
}.ApplyAndExpectData(t, map[string]any{
43+
"job_id": "234",
44+
"job_settings.0.settings.0.name": "Second",
45+
})
46+
}
47+
48+
func TestDataSourceQueryableJobMatchesName(t *testing.T) {
49+
qa.ResourceFixture{
50+
Fixtures: commonFixtures(),
51+
Resource: DataSourceJob(),
52+
Read: true,
53+
NonWritable: true,
54+
HCL: `job_name = "First"`,
55+
ID: "_",
56+
}.ApplyAndExpectData(t, map[string]any{
57+
"job_id": "123",
58+
"job_settings.0.settings.0.name": "First",
59+
})
60+
}
61+
62+
func TestDataSourceQueryableJobNoMatchName(t *testing.T) {
63+
qa.ResourceFixture{
64+
Fixtures: commonFixtures(),
65+
Resource: DataSourceJob(),
66+
Read: true,
67+
NonWritable: true,
68+
HCL: `job_name= "Third"`,
69+
ID: "_",
70+
}.ExpectError(t, "no job found with specified name or id")
71+
}
72+
73+
func TestDataSourceQueryableJobNoMatchId(t *testing.T) {
74+
qa.ResourceFixture{
75+
Fixtures: commonFixtures(),
76+
Resource: DataSourceJob(),
77+
Read: true,
78+
NonWritable: true,
79+
HCL: `job_id= "567"`,
80+
ID: "_",
81+
}.ExpectError(t, "no job found with specified name or id")
82+
}

provider/provider.go

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,7 @@ func DatabricksProvider() *schema.Provider {
4747
"databricks_dbfs_file_paths": storage.DataSourceDbfsFilePaths(),
4848
"databricks_group": scim.DataSourceGroup(),
4949
"databricks_jobs": jobs.DataSourceJobs(),
50+
"databricks_job": jobs.DataSourceJob(),
5051
"databricks_mws_workspaces": mws.DataSourceMwsWorkspaces(),
5152
"databricks_node_type": clusters.DataSourceNodeType(),
5253
"databricks_notebook": workspace.DataSourceNotebook(),

0 commit comments

Comments
 (0)