From 854abf8e6117e58c7f53f5e11b17c4d62e7fa865 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 4 Nov 2025 13:25:41 +0530 Subject: [PATCH 1/3] [Internal] Add remaining data sources to unified provider --- docs/data-sources/current_config.md | 5 ++ docs/data-sources/current_user.md | 5 ++ docs/data-sources/dbfs_file.md | 2 + docs/data-sources/dbfs_file_paths.md | 2 + docs/data-sources/group.md | 2 + docs/data-sources/instance_pool.md | 2 + docs/data-sources/job.md | 5 ++ docs/data-sources/mws_credentials.md | 5 ++ docs/data-sources/mws_workspaces.md | 5 ++ docs/data-sources/notebook_paths.md | 2 + docs/data-sources/service_principal.md | 2 + docs/data-sources/service_principals.md | 2 + docs/data-sources/user.md | 2 + jobs/data_job.go | 102 ++++++++++++++---------- mws/data_current_config.go | 57 ++++++++----- mws/data_mws_credentials.go | 47 +++++++---- mws/data_mws_workspaces.go | 47 +++++++---- pools/data_instance_pool.go | 8 +- scim/data_current_user.go | 69 +++++++++------- scim/data_group.go | 11 ++- scim/data_service_principal.go | 9 ++- scim/data_service_principals.go | 57 ++++++++----- scim/data_user.go | 97 +++++++++++----------- storage/data_dbfs_file.go | 51 +++++++----- storage/data_dbfs_file_paths.go | 67 +++++++++------- workspace/data_notebook_paths.go | 67 +++++++++------- 26 files changed, 455 insertions(+), 275 deletions(-) diff --git a/docs/data-sources/current_config.md b/docs/data-sources/current_config.md index 189adf4317..7947fcdc0d 100644 --- a/docs/data-sources/current_config.md +++ b/docs/data-sources/current_config.md @@ -39,6 +39,11 @@ resource "databricks_storage_credential" "external" { } ``` +## Argument Reference + +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. 
+ ## Exported attributes Data source exposes the following attributes: diff --git a/docs/data-sources/current_user.md b/docs/data-sources/current_user.md index 88410b5c09..c3154bfc04 100644 --- a/docs/data-sources/current_user.md +++ b/docs/data-sources/current_user.md @@ -55,6 +55,11 @@ output "job_url" { } ``` +## Argument Reference + +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + ## Exported attributes Data source exposes the following attributes: diff --git a/docs/data-sources/dbfs_file.md b/docs/data-sources/dbfs_file.md index c660924917..2b58d67dc3 100644 --- a/docs/data-sources/dbfs_file.md +++ b/docs/data-sources/dbfs_file.md @@ -20,6 +20,8 @@ data "databricks_dbfs_file" "report" { * `path` - (Required) Path on DBFS for the file from which to get content. * `limit_file_size` - (Required - boolean) Do not load content for files larger than 4MB. +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/dbfs_file_paths.md b/docs/data-sources/dbfs_file_paths.md index db878dfb7c..b31825e13e 100644 --- a/docs/data-sources/dbfs_file_paths.md +++ b/docs/data-sources/dbfs_file_paths.md @@ -20,6 +20,8 @@ data "databricks_dbfs_file_paths" "partitions" { * `path` - (Required) Path on DBFS for the file to perform listing * `recursive` - (Required) Either or not recursively list all files +* `provider_config` - (Optional) Configure the provider for management through account provider. 
This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/group.md b/docs/data-sources/group.md index 8f1f0ea390..ee94331752 100644 --- a/docs/data-sources/group.md +++ b/docs/data-sources/group.md @@ -32,6 +32,8 @@ Data source allows you to pick groups by the following attributes * `display_name` - (Required) Display name of the group. The group must exist before this resource can be planned. * `recursive` - (Optional) Collect information for all nested groups. *Defaults to true.* +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/instance_pool.md b/docs/data-sources/instance_pool.md index 2b7ef105fd..389edcc8b9 100644 --- a/docs/data-sources/instance_pool.md +++ b/docs/data-sources/instance_pool.md @@ -28,6 +28,8 @@ resource "databricks_cluster" "my_cluster" { Data source allows you to pick instance pool by the following attribute - `name` - Name of the instance pool. The instance pool must exist before this resource can be planned. +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. 
## Attribute Reference diff --git a/docs/data-sources/job.md b/docs/data-sources/job.md index 56534d98c7..4eceb357f6 100755 --- a/docs/data-sources/job.md +++ b/docs/data-sources/job.md @@ -22,6 +22,11 @@ output "job_num_workers" { } ``` +## Argument Reference + +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + ## Attribute Reference This data source exports the following attributes: diff --git a/docs/data-sources/mws_credentials.md b/docs/data-sources/mws_credentials.md index 6e9c0f8e90..dbb308a455 100755 --- a/docs/data-sources/mws_credentials.md +++ b/docs/data-sources/mws_credentials.md @@ -24,6 +24,11 @@ output "all_mws_credentials" { } ``` +## Argument Reference + +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. + ## Attribute Reference -> This resource has an evolving interface, which may change in future versions of the provider. diff --git a/docs/data-sources/mws_workspaces.md b/docs/data-sources/mws_workspaces.md index d3bbb3f11d..2f9bde0ac2 100755 --- a/docs/data-sources/mws_workspaces.md +++ b/docs/data-sources/mws_workspaces.md @@ -24,6 +24,11 @@ output "all_mws_workspaces" { } ``` +## Argument Reference + +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. 
+ ## Attribute Reference -> This resource has an evolving interface, which may change in future versions of the provider. diff --git a/docs/data-sources/notebook_paths.md b/docs/data-sources/notebook_paths.md index 29821a9509..6c374609af 100644 --- a/docs/data-sources/notebook_paths.md +++ b/docs/data-sources/notebook_paths.md @@ -20,6 +20,8 @@ data "databricks_notebook_paths" "prod" { * `path` - (Required) Path to workspace directory * `recursive` - (Required) Either or recursively walk given path +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/service_principal.md b/docs/data-sources/service_principal.md index 245e80bd6a..20f5f97a94 100644 --- a/docs/data-sources/service_principal.md +++ b/docs/data-sources/service_principal.md @@ -34,6 +34,8 @@ Data source allows you to pick service principals by one of the following attrib - `application_id` - (Required if neither `display_name` nor `scim_id` is used) Application ID of the service principal. The service principal must exist before this resource can be retrieved. - `display_name` - (Required if neither `application_id` nor `scim_id` is used) Exact display name of the service principal. The service principal must exist before this resource can be retrieved. In case if there are several service principals with the same name, an error is thrown. - `scim_id` - (Required if neither `application_id` nor `display_name` is used) Unique SCIM ID for a service principal in the Databricks workspace. The service principal must exist before this resource can be retrieved. +* `provider_config` - (Optional) Configure the provider for management through account provider. 
This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/service_principals.md b/docs/data-sources/service_principals.md index c841a5ace1..126ae25ba3 100644 --- a/docs/data-sources/service_principals.md +++ b/docs/data-sources/service_principals.md @@ -38,6 +38,8 @@ resource "databricks_group_member" "my_member_spn" { Data source allows you to pick service principals by the following attributes - `display_name_contains` - (Optional) Only return [databricks_service_principal](service_principal.md) display name that match the given name string +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/data-sources/user.md b/docs/data-sources/user.md index d129316834..b21affeb8e 100644 --- a/docs/data-sources/user.md +++ b/docs/data-sources/user.md @@ -33,6 +33,8 @@ Data source allows you to pick groups by the following attributes - `user_name` - (Optional) User name of the user. The user must exist before this resource can be planned. - `user_id` - (Optional) ID of the user. +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. 
## Attribute Reference diff --git a/jobs/data_job.go b/jobs/data_job.go index 5559325e4e..cb006675d6 100755 --- a/jobs/data_job.go +++ b/jobs/data_job.go @@ -5,6 +5,7 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceJob() common.Resource { @@ -15,48 +16,67 @@ func DataSourceJob() common.Resource { JobName string `json:"job_name,omitempty" tf:"computed"` Job *Job `json:"job_settings,omitempty" tf:"computed"` } - return common.DataResource(queryableJobData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - data := e.(*queryableJobData) - jobsAPI := NewJobsAPI(ctx, c) - var list []Job - var err error - if data.Id == "" { - data.Id = data.JobId - } - if data.Name == "" { - data.Name = data.JobName - } - if data.Name != "" { - // if name is provided, need to list all jobs ny name - list, err = jobsAPI.ListByName(data.Name, true) - } else { - // otherwise, just read the job - var job Job - job, err = jobsAPI.Read(data.Id) + s := common.StructToSchema(queryableJobData{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) if err != nil { return err } - data.Job = &job - data.Name = job.Settings.Name - } - if err != nil { - return err - } - for _, job := range list { - currentJob := job // De-referencing the temp variable used by the loop - currentJobId := currentJob.ID() - currentJobName := currentJob.Settings.Name - if currentJobName == data.Name || currentJobId == data.Id { - data.Job = ¤tJob - data.Name = currentJobName - data.Id = currentJobId - data.JobId = currentJobId - return nil // break the loop after we found the job - } - } - if data.Job == nil { - return fmt.Errorf("no job found with specified name") - } - 
return nil - }) + var data queryableJobData + common.DataToStructPointer(d, s, &data) + jobsAPI := NewJobsAPI(ctx, newClient) + var list []Job + if data.Id == "" { + data.Id = data.JobId + } + if data.Name == "" { + data.Name = data.JobName + } + if data.Name != "" { + // if name is provided, need to list all jobs ny name + list, err = jobsAPI.ListByName(data.Name, true) + } else { + // otherwise, just read the job + var job Job + job, err = jobsAPI.Read(data.Id) + if err != nil { + return err + } + data.Job = &job + data.Name = job.Settings.Name + } + if err != nil { + return err + } + for _, job := range list { + currentJob := job // De-referencing the temp variable used by the loop + currentJobId := currentJob.ID() + currentJobName := currentJob.Settings.Name + if currentJobName == data.Name || currentJobId == data.Id { + data.Job = ¤tJob + data.Name = currentJobName + data.Id = currentJobId + data.JobId = currentJobId + break + } + } + if data.Job == nil { + return fmt.Errorf("no job found with specified name") + } + err = common.StructToData(data, s, d) + if err != nil { + return err + } + if data.Id != "" { + d.SetId(data.Id) + } else { + d.SetId("_") + } + return nil + }, + } } diff --git a/mws/data_current_config.go b/mws/data_current_config.go index 4e45a0b656..c0f71af35f 100644 --- a/mws/data_current_config.go +++ b/mws/data_current_config.go @@ -4,6 +4,7 @@ import ( "context" "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) type currentConfig struct { @@ -15,24 +16,40 @@ type currentConfig struct { } func DataSourceCurrentConfiguration() common.Resource { - return common.DataResource(currentConfig{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - data := e.(*currentConfig) - data.IsAccount = false - if c.Config.IsAccountClient() { - data.AccountId = c.Config.AccountID - data.IsAccount = true - } - data.Host = c.Config.Host - if c.Config.IsAws() { - 
data.CloudType = "aws" - } else if c.Config.IsAzure() { - data.CloudType = "azure" - } else if c.Config.IsGcp() { - data.CloudType = "gcp" - } else { - data.CloudType = "unknown" - } - data.AuthType = c.Config.AuthType - return nil - }) + s := common.StructToSchema(currentConfig{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + var data currentConfig + common.DataToStructPointer(d, s, &data) + data.IsAccount = false + if newClient.Config.IsAccountClient() { + data.AccountId = newClient.Config.AccountID + data.IsAccount = true + } + data.Host = newClient.Config.Host + if newClient.Config.IsAws() { + data.CloudType = "aws" + } else if newClient.Config.IsAzure() { + data.CloudType = "azure" + } else if newClient.Config.IsGcp() { + data.CloudType = "gcp" + } else { + data.CloudType = "unknown" + } + data.AuthType = newClient.Config.AuthType + err = common.StructToData(data, s, d) + if err != nil { + return err + } + d.SetId("_") + return nil + }, + } } diff --git a/mws/data_mws_credentials.go b/mws/data_mws_credentials.go index aa757fd858..3c632e84b7 100755 --- a/mws/data_mws_credentials.go +++ b/mws/data_mws_credentials.go @@ -5,25 +5,42 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceMwsCredentials() common.Resource { type mwsCredentialsData struct { Ids map[string]string `json:"ids,omitempty" tf:"computed"` } - return common.DataResource(mwsCredentialsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - data := e.(*mwsCredentialsData) - if c.Config.AccountID == "" { - return fmt.Errorf("provider block is missing `account_id` property") - } - credentials, err := 
NewCredentialsAPI(ctx, c).List(c.Config.AccountID) - if err != nil { - return err - } - data.Ids = make(map[string]string) - for _, v := range credentials { - data.Ids[v.CredentialsName] = v.CredentialsID - } - return nil - }) + s := common.StructToSchema(mwsCredentialsData{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + var data mwsCredentialsData + common.DataToStructPointer(d, s, &data) + if newClient.Config.AccountID == "" { + return fmt.Errorf("provider block is missing `account_id` property") + } + credentials, err := NewCredentialsAPI(ctx, newClient).List(newClient.Config.AccountID) + if err != nil { + return err + } + data.Ids = make(map[string]string) + for _, v := range credentials { + data.Ids[v.CredentialsName] = v.CredentialsID + } + err = common.StructToData(data, s, d) + if err != nil { + return err + } + d.SetId("_") + return nil + }, + } } diff --git a/mws/data_mws_workspaces.go b/mws/data_mws_workspaces.go index 4c2d48a9a4..86e7f5746e 100755 --- a/mws/data_mws_workspaces.go +++ b/mws/data_mws_workspaces.go @@ -5,25 +5,42 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceMwsWorkspaces() common.Resource { type mwsWorkspacesData struct { Ids map[string]int64 `json:"ids" tf:"computed"` } - return common.DataResource(mwsWorkspacesData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - data := e.(*mwsWorkspacesData) - if c.Config.AccountID == "" { - return fmt.Errorf("provider block is missing `account_id` property") - } - workspaces, err := NewWorkspacesAPI(ctx, c).List(c.Config.AccountID) - if err != nil { - return err - } - data.Ids = map[string]int64{} - 
for _, v := range workspaces { - data.Ids[v.WorkspaceName] = v.WorkspaceID - } - return nil - }) + s := common.StructToSchema(mwsWorkspacesData{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + var data mwsWorkspacesData + common.DataToStructPointer(d, s, &data) + if newClient.Config.AccountID == "" { + return fmt.Errorf("provider block is missing `account_id` property") + } + workspaces, err := NewWorkspacesAPI(ctx, newClient).List(newClient.Config.AccountID) + if err != nil { + return err + } + data.Ids = map[string]int64{} + for _, v := range workspaces { + data.Ids[v.WorkspaceName] = v.WorkspaceID + } + err = common.StructToData(data, s, d) + if err != nil { + return err + } + d.SetId("_") + return nil + }, + } } diff --git a/pools/data_instance_pool.go b/pools/data_instance_pool.go index 1c733246d0..95b42fc9be 100644 --- a/pools/data_instance_pool.go +++ b/pools/data_instance_pool.go @@ -29,11 +29,17 @@ func DataSourceInstancePool() common.Resource { Attributes *InstancePoolAndStats `json:"pool_info,omitempty" tf:"computed"` } s := common.StructToSchema(poolDetails{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return common.Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } name := d.Get("name").(string) - poolsAPI := NewInstancePoolsAPI(ctx, m) + poolsAPI := NewInstancePoolsAPI(ctx, newClient) pool, err := getPool(poolsAPI, name) if err != nil { return err diff --git a/scim/data_current_user.go b/scim/data_current_user.go index 7a6066c1bd..192663c4fe 100644 --- a/scim/data_current_user.go +++ 
b/scim/data_current_user.go @@ -14,39 +14,46 @@ var nonAlphanumeric = regexp.MustCompile(`\W`) // DataSourceCurrentUser returns information about caller identity func DataSourceCurrentUser() common.Resource { - return common.Resource{ - Schema: map[string]*schema.Schema{ - "user_name": { - Type: schema.TypeString, - Computed: true, - }, - "home": { - Type: schema.TypeString, - Computed: true, - }, - "repos": { - Type: schema.TypeString, - Computed: true, - }, - "alphanumeric": { - Type: schema.TypeString, - Computed: true, - }, - "external_id": { - Type: schema.TypeString, - Computed: true, - }, - "workspace_url": { - Type: schema.TypeString, - Computed: true, - }, - "acl_principal_id": { - Type: schema.TypeString, - Computed: true, - }, + s := map[string]*schema.Schema{ + "user_name": { + Type: schema.TypeString, + Computed: true, + }, + "home": { + Type: schema.TypeString, + Computed: true, + }, + "repos": { + Type: schema.TypeString, + Computed: true, + }, + "alphanumeric": { + Type: schema.TypeString, + Computed: true, + }, + "external_id": { + Type: schema.TypeString, + Computed: true, + }, + "workspace_url": { + Type: schema.TypeString, + Computed: true, }, + "acl_principal_id": { + Type: schema.TypeString, + Computed: true, + }, + } + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - w, err := c.WorkspaceClient() + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + w, err := newClient.WorkspaceClient() if err != nil { return err } diff --git a/scim/data_group.go b/scim/data_group.go index b0512ef840..f5c403e7f9 100644 --- a/scim/data_group.go +++ b/scim/data_group.go @@ -35,17 +35,22 @@ func DataSourceGroup() common.Resource { addEntitlementsToSchema(s) return s }) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return 
common.Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } var this entity var group Group - var err error common.DataToStructPointer(d, s, &this) - groupsAPI := NewGroupsAPI(ctx, m) + groupsAPI := NewGroupsAPI(ctx, newClient) groupAttributes := "displayName,members,roles,entitlements,externalId,groups" - if m.DatabricksClient.Config.IsAccountClient() { + if newClient.DatabricksClient.Config.IsAccountClient() { group, err = groupsAPI.ReadByDisplayName(this.DisplayName, "id") if err != nil { return err diff --git a/scim/data_service_principal.go b/scim/data_service_principal.go index 87b4f3a83c..a6a3e391c6 100644 --- a/scim/data_service_principal.go +++ b/scim/data_service_principal.go @@ -30,16 +30,21 @@ func DataSourceServicePrincipal() common.Resource { s["scim_id"].ExactlyOneOf = []string{"application_id", "display_name", "scim_id"} return s }) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return common.Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } var response spnData var spList []User - var err error common.DataToStructPointer(d, s, &response) - spnAPI := NewServicePrincipalsAPI(ctx, m) + spnAPI := NewServicePrincipalsAPI(ctx, newClient) if response.ApplicationID != "" { spList, err = spnAPI.Filter(fmt.Sprintf(`applicationId eq "%s"`, response.ApplicationID), true) diff --git a/scim/data_service_principals.go b/scim/data_service_principals.go index f01d56795b..2ce52d1ef0 100644 --- a/scim/data_service_principals.go +++ b/scim/data_service_principals.go @@ -6,6 +6,7 @@ import ( "sort" "github.com/databricks/terraform-provider-databricks/common" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) // 
DataSourceServicePrincipals searches for service principals based on display_name @@ -14,26 +15,42 @@ func DataSourceServicePrincipals() common.Resource { DisplayNameContains string `json:"display_name_contains,omitempty" tf:"computed"` ApplicationIDs []string `json:"application_ids,omitempty" tf:"computed,slice_set"` } - return common.DataResource(spnsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { - response := e.(*spnsData) - spnAPI := NewServicePrincipalsAPI(ctx, c) + s := common.StructToSchema(spnsData{}, nil) + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, + Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { + newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + var response spnsData + common.DataToStructPointer(d, s, &response) + spnAPI := NewServicePrincipalsAPI(ctx, newClient) - var filter string + var filter string - if response.DisplayNameContains != "" { - filter = fmt.Sprintf(`displayName co "%s"`, response.DisplayNameContains) - } - spList, err := spnAPI.Filter(filter, true) - if err != nil { - return err - } - if len(spList) == 0 { - return fmt.Errorf("cannot find SPs with display name containing %s", response.DisplayNameContains) - } - for _, sp := range spList { - response.ApplicationIDs = append(response.ApplicationIDs, sp.ApplicationID) - } - sort.Strings(response.ApplicationIDs) - return nil - }) + if response.DisplayNameContains != "" { + filter = fmt.Sprintf(`displayName co "%s"`, response.DisplayNameContains) + } + spList, err := spnAPI.Filter(filter, true) + if err != nil { + return err + } + if len(spList) == 0 { + return fmt.Errorf("cannot find SPs with display name containing %s", response.DisplayNameContains) + } + for _, sp := range spList { + response.ApplicationIDs = append(response.ApplicationIDs, sp.ApplicationID) + } + 
sort.Strings(response.ApplicationIDs) + err = common.StructToData(response, s, d) + if err != nil { + return err + } + d.SetId("_") + return nil + }, + } } diff --git a/scim/data_user.go b/scim/data_user.go index 594dde7be7..b8956c9586 100644 --- a/scim/data_user.go +++ b/scim/data_user.go @@ -27,53 +27,60 @@ func getUser(usersAPI UsersAPI, id, name string) (user User, err error) { // DataSourceUser returns information about user specified by user name func DataSourceUser() common.Resource { - return common.Resource{ - Schema: map[string]*schema.Schema{ - "user_name": { - Type: schema.TypeString, - ExactlyOneOf: []string{"user_name", "user_id"}, - Optional: true, - }, - "user_id": { - Type: schema.TypeString, - ExactlyOneOf: []string{"user_name", "user_id"}, - Optional: true, - }, - "home": { - Type: schema.TypeString, - Computed: true, - }, - "repos": { - Type: schema.TypeString, - Computed: true, - }, - "display_name": { - Type: schema.TypeString, - Computed: true, - }, - "alphanumeric": { - Type: schema.TypeString, - Computed: true, - }, - "external_id": { - Type: schema.TypeString, - Computed: true, - }, - "application_id": { - Type: schema.TypeString, - Computed: true, - }, - "acl_principal_id": { - Type: schema.TypeString, - Computed: true, - }, - "active": { - Type: schema.TypeBool, - Computed: true, - }, + s := map[string]*schema.Schema{ + "user_name": { + Type: schema.TypeString, + ExactlyOneOf: []string{"user_name", "user_id"}, + Optional: true, + }, + "user_id": { + Type: schema.TypeString, + ExactlyOneOf: []string{"user_name", "user_id"}, + Optional: true, + }, + "home": { + Type: schema.TypeString, + Computed: true, + }, + "repos": { + Type: schema.TypeString, + Computed: true, + }, + "display_name": { + Type: schema.TypeString, + Computed: true, + }, + "alphanumeric": { + Type: schema.TypeString, + Computed: true, + }, + "external_id": { + Type: schema.TypeString, + Computed: true, + }, + "application_id": { + Type: schema.TypeString, + Computed: 
true, }, + "acl_principal_id": { + Type: schema.TypeString, + Computed: true, + }, + "active": { + Type: schema.TypeBool, + Computed: true, + }, + } + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) + return common.Resource{ + Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { - usersAPI := NewUsersAPI(ctx, m) + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } + usersAPI := NewUsersAPI(ctx, newClient) user, err := getUser(usersAPI, d.Get("user_id").(string), d.Get("user_name").(string)) if err != nil { return err diff --git a/storage/data_dbfs_file.go b/storage/data_dbfs_file.go index 16c879331e..a1961f11ba 100644 --- a/storage/data_dbfs_file.go +++ b/storage/data_dbfs_file.go @@ -10,10 +10,38 @@ import ( ) func DataSourceDbfsFile() common.Resource { + s := map[string]*schema.Schema{ + "path": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "limit_file_size": { + Type: schema.TypeBool, + Required: true, + ForceNew: true, + }, + "content": { + Type: schema.TypeString, + Computed: true, + ForceNew: true, + }, + "file_size": { + Type: schema.TypeInt, + Computed: true, + }, + } + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return common.Resource{ + Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } limitFileSize := d.Get("limit_file_size").(bool) - dbfsAPI := NewDbfsAPI(ctx, m) + dbfsAPI := NewDbfsAPI(ctx, newClient) fileInfo, err := dbfsAPI.Status(d.Get("path").(string)) if err != nil { return err @@ -32,26 +60,5 @@ func DataSourceDbfsFile() common.Resource { d.Set("content", base64.StdEncoding.EncodeToString(content)) return nil }, - Schema: map[string]*schema.Schema{ - "path": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, 
- "limit_file_size": { - Type: schema.TypeBool, - Required: true, - ForceNew: true, - }, - "content": { - Type: schema.TypeString, - Computed: true, - ForceNew: true, - }, - "file_size": { - Type: schema.TypeInt, - Computed: true, - }, - }, } } diff --git a/storage/data_dbfs_file_paths.go b/storage/data_dbfs_file_paths.go index 563c4986a1..395418d7ec 100644 --- a/storage/data_dbfs_file_paths.go +++ b/storage/data_dbfs_file_paths.go @@ -9,11 +9,47 @@ import ( ) func DataSourceDbfsFilePaths() common.Resource { + s := map[string]*schema.Schema{ + "path": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "recursive": { + Type: schema.TypeBool, + Required: true, + ForceNew: true, + }, + "path_list": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "path": { + Type: schema.TypeString, + Optional: true, + }, + "file_size": { + Type: schema.TypeInt, + Optional: true, + }, + }, + }, + Set: workspace.PathListHash, + }, + } + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return common.Resource{ + Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } path := d.Get("path").(string) recursive := d.Get("recursive").(bool) - paths, err := NewDbfsAPI(ctx, m).List(path, recursive) + paths, err := NewDbfsAPI(ctx, newClient).List(path, recursive) if err != nil { return err } @@ -29,34 +65,5 @@ func DataSourceDbfsFilePaths() common.Resource { d.Set("path_list", pathList) return nil }, - Schema: map[string]*schema.Schema{ - "path": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - "recursive": { - Type: schema.TypeBool, - Required: true, - ForceNew: true, - }, - "path_list": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "path": { - Type: 
schema.TypeString, - Optional: true, - }, - "file_size": { - Type: schema.TypeInt, - Optional: true, - }, - }, - }, - Set: workspace.PathListHash, - }, - }, } } diff --git a/workspace/data_notebook_paths.go b/workspace/data_notebook_paths.go index cf897284e4..f44ecae4be 100644 --- a/workspace/data_notebook_paths.go +++ b/workspace/data_notebook_paths.go @@ -9,11 +9,47 @@ import ( // DataSourceNotebookPaths ... func DataSourceNotebookPaths() common.Resource { + s := map[string]*schema.Schema{ + "path": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "recursive": { + Type: schema.TypeBool, + Required: true, + ForceNew: true, + }, + "notebook_path_list": { + Type: schema.TypeSet, + Computed: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "language": { + Type: schema.TypeString, + Optional: true, + }, + "path": { + Type: schema.TypeString, + Optional: true, + }, + }, + }, + Set: PathListHash, + }, + } + common.AddNamespaceInSchema(s) + common.NamespaceCustomizeSchemaMap(s) return common.Resource{ + Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *common.DatabricksClient) error { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } path := d.Get("path").(string) recursive := d.Get("recursive").(bool) - notebookList, err := NewNotebooksAPI(ctx, m).List(path, recursive, false) + notebookList, err := NewNotebooksAPI(ctx, newClient).List(path, recursive, false) if err != nil { return err } @@ -31,34 +67,5 @@ func DataSourceNotebookPaths() common.Resource { d.Set("notebook_path_list", notebookPathList) return nil }, - Schema: map[string]*schema.Schema{ - "path": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - "recursive": { - Type: schema.TypeBool, - Required: true, - ForceNew: true, - }, - "notebook_path_list": { - Type: schema.TypeSet, - Computed: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "language": { - 
Type: schema.TypeString, - Optional: true, - }, - "path": { - Type: schema.TypeString, - Optional: true, - }, - }, - }, - Set: PathListHash, - }, - }, } } From 3d1c141cdbe46b289e945517f7180f4a412504fe Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 4 Nov 2025 13:34:12 +0530 Subject: [PATCH 2/3] - --- common/datasource.go | 8 ++- jobs/data_job.go | 102 +++++++++++++------------------- mws/data_current_config.go | 57 +++++++----------- mws/data_mws_credentials.go | 47 +++++---------- mws/data_mws_workspaces.go | 47 +++++---------- scim/data_service_principals.go | 57 +++++++----------- 6 files changed, 118 insertions(+), 200 deletions(-) diff --git a/common/datasource.go b/common/datasource.go index f8c33c376a..6afbbaf3e5 100644 --- a/common/datasource.go +++ b/common/datasource.go @@ -15,12 +15,18 @@ func DataResource(sc any, read func(context.Context, any, *DatabricksClient) err s := StructToSchema(sc, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m }) + AddNamespaceInSchema(s) + NamespaceCustomizeSchemaMap(s) return Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, m *DatabricksClient) (err error) { + newClient, err := m.DatabricksClientForUnifiedProvider(ctx, d) + if err != nil { + return err + } ptr := reflect.New(reflect.ValueOf(sc).Type()) DataToReflectValue(d, s, ptr.Elem()) - err = read(ctx, ptr.Interface(), m) + err = read(ctx, ptr.Interface(), newClient) if err != nil { err = nicerError(ctx, err, "read data") } diff --git a/jobs/data_job.go b/jobs/data_job.go index cb006675d6..5559325e4e 100755 --- a/jobs/data_job.go +++ b/jobs/data_job.go @@ -5,7 +5,6 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceJob() common.Resource { @@ -16,67 +15,48 @@ func DataSourceJob() common.Resource { JobName string `json:"job_name,omitempty" tf:"computed"` Job *Job `json:"job_settings,omitempty" 
tf:"computed"` } - s := common.StructToSchema(queryableJobData{}, nil) - common.AddNamespaceInSchema(s) - common.NamespaceCustomizeSchemaMap(s) - return common.Resource{ - Schema: s, - Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) + return common.DataResource(queryableJobData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { + data := e.(*queryableJobData) + jobsAPI := NewJobsAPI(ctx, c) + var list []Job + var err error + if data.Id == "" { + data.Id = data.JobId + } + if data.Name == "" { + data.Name = data.JobName + } + if data.Name != "" { + // if name is provided, need to list all jobs by name + list, err = jobsAPI.ListByName(data.Name, true) + } else { + // otherwise, just read the job + var job Job + job, err = jobsAPI.Read(data.Id) if err != nil { return err } - var data queryableJobData - common.DataToStructPointer(d, s, &data) - jobsAPI := NewJobsAPI(ctx, newClient) - var list []Job - if data.Id == "" { - data.Id = data.JobId - } - if data.Name == "" { - data.Name = data.JobName - } - if data.Name != "" { - // if name is provided, need to list all jobs ny name - list, err = jobsAPI.ListByName(data.Name, true) - } else { - // otherwise, just read the job - var job Job - job, err = jobsAPI.Read(data.Id) - if err != nil { - return err - } - data.Job = &job - data.Name = job.Settings.Name - } - if err != nil { - return err - } - for _, job := range list { - currentJob := job // De-referencing the temp variable used by the loop - currentJobId := currentJob.ID() - currentJobName := currentJob.Settings.Name - if currentJobName == data.Name || currentJobId == data.Id { - data.Job = &currentJob - data.Name = currentJobName - data.Id = currentJobId - data.JobId = currentJobId - break - } - } - if data.Job == nil { - return fmt.Errorf("no job found with specified name") - } - err = common.StructToData(data, s, d) - if err != nil { - return 
err - } - if data.Id != "" { - d.SetId(data.Id) - } else { - d.SetId("_") - } - return nil - }, - } + data.Job = &job + data.Name = job.Settings.Name + } + if err != nil { + return err + } + for _, job := range list { + currentJob := job // De-referencing the temp variable used by the loop + currentJobId := currentJob.ID() + currentJobName := currentJob.Settings.Name + if currentJobName == data.Name || currentJobId == data.Id { + data.Job = &currentJob + data.Name = currentJobName + data.Id = currentJobId + data.JobId = currentJobId + return nil // break the loop after we found the job + } + } + if data.Job == nil { + return fmt.Errorf("no job found with specified name") + } + return nil + }) } diff --git a/mws/data_current_config.go b/mws/data_current_config.go index c0f71af35f..4e45a0b656 100644 --- a/mws/data_current_config.go +++ b/mws/data_current_config.go @@ -4,7 +4,6 @@ import ( "context" "github.com/databricks/terraform-provider-databricks/common" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) type currentConfig struct { @@ -16,40 +15,24 @@ func DataSourceCurrentConfiguration() common.Resource { - s := common.StructToSchema(currentConfig{}, nil) - common.AddNamespaceInSchema(s) - common.NamespaceCustomizeSchemaMap(s) - return common.Resource{ - Schema: s, - Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) - if err != nil { - return err - } - var data currentConfig - common.DataToStructPointer(d, s, &data) - data.IsAccount = false - if newClient.Config.IsAccountClient() { - data.AccountId = newClient.Config.AccountID - data.IsAccount = true - } - data.Host = newClient.Config.Host - if newClient.Config.IsAws() { - data.CloudType = "aws" - } else if newClient.Config.IsAzure() { - data.CloudType = "azure" - } else if newClient.Config.IsGcp() { - data.CloudType = "gcp" - } - else { - data.CloudType = "unknown" - } - 
data.AuthType = newClient.Config.AuthType - err = common.StructToData(data, s, d) - if err != nil { - return err - } - d.SetId("_") - return nil - }, - } + return common.DataResource(currentConfig{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { + data := e.(*currentConfig) + data.IsAccount = false + if c.Config.IsAccountClient() { + data.AccountId = c.Config.AccountID + data.IsAccount = true + } + data.Host = c.Config.Host + if c.Config.IsAws() { + data.CloudType = "aws" + } else if c.Config.IsAzure() { + data.CloudType = "azure" + } else if c.Config.IsGcp() { + data.CloudType = "gcp" + } else { + data.CloudType = "unknown" + } + data.AuthType = c.Config.AuthType + return nil + }) } diff --git a/mws/data_mws_credentials.go b/mws/data_mws_credentials.go index 3c632e84b7..aa757fd858 100755 --- a/mws/data_mws_credentials.go +++ b/mws/data_mws_credentials.go @@ -5,42 +5,25 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceMwsCredentials() common.Resource { type mwsCredentialsData struct { Ids map[string]string `json:"ids,omitempty" tf:"computed"` } - s := common.StructToSchema(mwsCredentialsData{}, nil) - common.AddNamespaceInSchema(s) - common.NamespaceCustomizeSchemaMap(s) - return common.Resource{ - Schema: s, - Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) - if err != nil { - return err - } - var data mwsCredentialsData - common.DataToStructPointer(d, s, &data) - if newClient.Config.AccountID == "" { - return fmt.Errorf("provider block is missing `account_id` property") - } - credentials, err := NewCredentialsAPI(ctx, newClient).List(newClient.Config.AccountID) - if err != nil { - return err - } - data.Ids = make(map[string]string) - for _, v := range credentials { - data.Ids[v.CredentialsName] = v.CredentialsID - } - err = 
common.StructToData(data, s, d) - if err != nil { - return err - } - d.SetId("_") - return nil - }, - } + return common.DataResource(mwsCredentialsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { + data := e.(*mwsCredentialsData) + if c.Config.AccountID == "" { + return fmt.Errorf("provider block is missing `account_id` property") + } + credentials, err := NewCredentialsAPI(ctx, c).List(c.Config.AccountID) + if err != nil { + return err + } + data.Ids = make(map[string]string) + for _, v := range credentials { + data.Ids[v.CredentialsName] = v.CredentialsID + } + return nil + }) } diff --git a/mws/data_mws_workspaces.go b/mws/data_mws_workspaces.go index 86e7f5746e..4c2d48a9a4 100755 --- a/mws/data_mws_workspaces.go +++ b/mws/data_mws_workspaces.go @@ -5,42 +5,25 @@ import ( "fmt" "github.com/databricks/terraform-provider-databricks/common" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) func DataSourceMwsWorkspaces() common.Resource { type mwsWorkspacesData struct { Ids map[string]int64 `json:"ids" tf:"computed"` } - s := common.StructToSchema(mwsWorkspacesData{}, nil) - common.AddNamespaceInSchema(s) - common.NamespaceCustomizeSchemaMap(s) - return common.Resource{ - Schema: s, - Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) - if err != nil { - return err - } - var data mwsWorkspacesData - common.DataToStructPointer(d, s, &data) - if newClient.Config.AccountID == "" { - return fmt.Errorf("provider block is missing `account_id` property") - } - workspaces, err := NewWorkspacesAPI(ctx, newClient).List(newClient.Config.AccountID) - if err != nil { - return err - } - data.Ids = map[string]int64{} - for _, v := range workspaces { - data.Ids[v.WorkspaceName] = v.WorkspaceID - } - err = common.StructToData(data, s, d) - if err != nil { - return err - } - d.SetId("_") - return nil - }, - } + return 
common.DataResource(mwsWorkspacesData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { + data := e.(*mwsWorkspacesData) + if c.Config.AccountID == "" { + return fmt.Errorf("provider block is missing `account_id` property") + } + workspaces, err := NewWorkspacesAPI(ctx, c).List(c.Config.AccountID) + if err != nil { + return err + } + data.Ids = map[string]int64{} + for _, v := range workspaces { + data.Ids[v.WorkspaceName] = v.WorkspaceID + } + return nil + }) } diff --git a/scim/data_service_principals.go b/scim/data_service_principals.go index 2ce52d1ef0..f01d56795b 100644 --- a/scim/data_service_principals.go +++ b/scim/data_service_principals.go @@ -6,7 +6,6 @@ import ( "sort" "github.com/databricks/terraform-provider-databricks/common" - "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" ) // DataSourceServicePrincipals searches for service principals based on display_name @@ -15,42 +14,26 @@ func DataSourceServicePrincipals() common.Resource { DisplayNameContains string `json:"display_name_contains,omitempty" tf:"computed"` ApplicationIDs []string `json:"application_ids,omitempty" tf:"computed,slice_set"` } - s := common.StructToSchema(spnsData{}, nil) - common.AddNamespaceInSchema(s) - common.NamespaceCustomizeSchemaMap(s) - return common.Resource{ - Schema: s, - Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) - if err != nil { - return err - } - var response spnsData - common.DataToStructPointer(d, s, &response) - spnAPI := NewServicePrincipalsAPI(ctx, newClient) + return common.DataResource(spnsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error { + response := e.(*spnsData) + spnAPI := NewServicePrincipalsAPI(ctx, c) - var filter string + var filter string - if response.DisplayNameContains != "" { - filter = fmt.Sprintf(`displayName co "%s"`, response.DisplayNameContains) - } - spList, err := 
spnAPI.Filter(filter, true) - if err != nil { - return err - } - if len(spList) == 0 { - return fmt.Errorf("cannot find SPs with display name containing %s", response.DisplayNameContains) - } - for _, sp := range spList { - response.ApplicationIDs = append(response.ApplicationIDs, sp.ApplicationID) - } - sort.Strings(response.ApplicationIDs) - err = common.StructToData(response, s, d) - if err != nil { - return err - } - d.SetId("_") - return nil - }, - } + if response.DisplayNameContains != "" { + filter = fmt.Sprintf(`displayName co "%s"`, response.DisplayNameContains) + } + spList, err := spnAPI.Filter(filter, true) + if err != nil { + return err + } + if len(spList) == 0 { + return fmt.Errorf("cannot find SPs with display name containing %s", response.DisplayNameContains) + } + for _, sp := range spList { + response.ApplicationIDs = append(response.ApplicationIDs, sp.ApplicationID) + } + sort.Strings(response.ApplicationIDs) + return nil + }) } From 7080f07f6600524c0bfde2cbef2fb29ae4e69803 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 4 Nov 2025 13:52:15 +0530 Subject: [PATCH 3/3] - --- jobs/data_job_acc_test.go | 103 ++++++++++++++++++++++++++++++++++++-- scim/data_current_user.go | 6 +-- 2 files changed, 100 insertions(+), 9 deletions(-) diff --git a/jobs/data_job_acc_test.go b/jobs/data_job_acc_test.go index 4733280fbd..683d7cea8a 100755 --- a/jobs/data_job_acc_test.go +++ b/jobs/data_job_acc_test.go @@ -1,14 +1,19 @@ package jobs_test import ( + "context" + "fmt" + "regexp" + "strconv" "testing" + "github.com/databricks/databricks-sdk-go" "github.com/databricks/terraform-provider-databricks/internal/acceptance" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/stretchr/testify/require" ) -func TestAccDataSourceJob(t *testing.T) { - acceptance.WorkspaceLevel(t, acceptance.Step{ - Template: ` +const dataSourceJobTemplate = ` data "databricks_current_user" "me" {} data "databricks_spark_version" "latest" {} data 
"databricks_node_type" "smallest" { @@ -26,7 +31,7 @@ func TestAccDataSourceJob(t *testing.T) { } resource "databricks_job" "this" { - name = "job-datasource-acceptance-test" + name = "job-datasource-acceptance-test-{var.RANDOM}" job_cluster { job_cluster_key = "j" @@ -52,9 +57,99 @@ func TestAccDataSourceJob(t *testing.T) { } } +` +func TestAccDataSourceJob(t *testing.T) { + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: dataSourceJobTemplate + ` data "databricks_job" "this" { job_name = databricks_job.this.name }`, }) } + +func TestAccDataSourceJob_InvalidID(t *testing.T) { + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: ` + data "databricks_job" "this" { + job_name = "job-{var.RANDOM}" + provider_config { + workspace_id = "invalid" + } + }`, + ExpectError: regexp.MustCompile(`workspace_id must be a positive integer without leading zeros`), + PlanOnly: true, + }) +} + +func TestAccDataSourceJob_MismatchedID(t *testing.T) { + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: ` + data "databricks_job" "this" { + job_name = "job-{var.RANDOM}" + provider_config { + workspace_id = "123" + } + }`, + ExpectError: regexp.MustCompile(`workspace_id mismatch.*please check the workspace_id provided in provider_config`), + }) +} + +func TestAccDataSourceJob_EmptyID(t *testing.T) { + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: ` + data "databricks_job" "this" { + job_name = "job-{var.RANDOM}" + provider_config { + workspace_id = "" + } + }`, + ExpectError: regexp.MustCompile(`expected "provider_config.0.workspace_id" to not be an empty string`), + }) +} + +func TestAccDataSourceJob_EmptyBlock(t *testing.T) { + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: ` + data "databricks_job" "this" { + job_name = "job-{var.RANDOM}" + provider_config { + } + }`, + ExpectError: regexp.MustCompile(`The argument "workspace_id" is required, but no definition was found.`), + }) +} + +func TestAccDataSourceJobApply(t *testing.T) { + 
acceptance.LoadWorkspaceEnv(t) + ctx := context.Background() + w := databricks.Must(databricks.NewWorkspaceClient()) + workspaceID, err := w.CurrentWorkspaceID(ctx) + require.NoError(t, err) + workspaceIDStr := strconv.FormatInt(workspaceID, 10) + acceptance.WorkspaceLevel(t, acceptance.Step{ + Template: dataSourceJobTemplate + ` + data "databricks_job" "this" { + job_name = databricks_job.this.name + }`, + }, acceptance.Step{ + Template: dataSourceJobTemplate + fmt.Sprintf(` + data "databricks_job" "this" { + job_name = databricks_job.this.name + provider_config { + workspace_id = "%s" + } + }`, workspaceIDStr), + Check: func(s *terraform.State) error { + r, ok := s.RootModule().Resources["data.databricks_job.this"] + if !ok { + return fmt.Errorf("data not found in state") + } + id := r.Primary.Attributes["provider_config.0.workspace_id"] + if id != workspaceIDStr { + return fmt.Errorf("wrong workspace_id found: %v", r.Primary.Attributes) + } + return nil + }, + }) +} diff --git a/scim/data_current_user.go b/scim/data_current_user.go index 192663c4fe..b166cb3fda 100644 --- a/scim/data_current_user.go +++ b/scim/data_current_user.go @@ -49,11 +49,7 @@ func DataSourceCurrentUser() common.Resource { return common.Resource{ Schema: s, Read: func(ctx context.Context, d *schema.ResourceData, c *common.DatabricksClient) error { - newClient, err := c.DatabricksClientForUnifiedProvider(ctx, d) - if err != nil { - return err - } - w, err := newClient.WorkspaceClient() + w, err := c.WorkspaceClientUnifiedProvider(ctx, d) if err != nil { return err }