From 13a2aa291374bae1ecac12df6239e15ceab3d34e Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Thu, 16 Oct 2025 21:43:19 +0530 Subject: [PATCH 01/27] [Internal] Add provider_config support for plugin framework --- .../pluginfw/tfschema/attribute_converter.go | 8 +- .../pluginfw/tfschema/unified_provider.go | 94 +++++++++++++++++++ 2 files changed, 100 insertions(+), 2 deletions(-) create mode 100644 internal/providers/pluginfw/tfschema/unified_provider.go diff --git a/internal/providers/pluginfw/tfschema/attribute_converter.go b/internal/providers/pluginfw/tfschema/attribute_converter.go index 56efce9569..7865b85aec 100644 --- a/internal/providers/pluginfw/tfschema/attribute_converter.go +++ b/internal/providers/pluginfw/tfschema/attribute_converter.go @@ -7,12 +7,16 @@ type Blockable interface { ToBlock() BlockBuilder } -// convertAttributesToBlocks converts all attributes implementing the Blockable interface to blocks, returning -// a new NestedBlockObject with the converted attributes and the original blocks. +// convertAttributesToBlocks converts all attributes implementing the Blockable interface to blocks except for "provider_config", +// returning a new NestedBlockObject with the converted attributes and the original blocks. func convertAttributesToBlocks(attributes map[string]AttributeBuilder, blocks map[string]BlockBuilder) NestedBlockObject { newAttributes := make(map[string]AttributeBuilder) newBlocks := make(map[string]BlockBuilder) for name, attr := range attributes { + if name == "provider_config" { + newAttributes[name] = attr + continue + } if lnab, ok := attr.(Blockable); ok { newBlocks[name] = lnab.ToBlock() } else { diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go new file mode 100644 index 0000000000..38a1a43087 --- /dev/null +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -0,0 +1,94 @@ +package tfschema + +import ( + "context" + "reflect" + + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +type Namespace struct { + ProviderConfig types.Object `tfsdk:"provider_config"` +} + +// ProviderConfig is used to store the provider configurations for unified terraform provider +// across resources onboarded to plugin framework. 
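+//
+// Resources surface this as an optional nested "provider_config" attribute; in HCL the
+// expected shape is roughly the following (illustrative example):
+//
+//	provider_config = {
+//	  workspace_id = "1234567890"
+//	}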
+type ProviderConfig struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddPlanModifier( + stringplanmodifier.RequiresReplaceIf(workspaceIDPlanModifier, "", "")) + attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + return attrs +} + +func workspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { + // Require replacement if workspace_id changes from one non-empty value to another + oldValue := req.StateValue.ValueString() + newValue := req.PlanValue.ValueString() + + if oldValue != "" && newValue != "" && oldValue != newValue { + resp.RequiresReplace = true + } +} + +func (r ProviderConfig) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +func (r ProviderConfig) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +func (r ProviderConfig) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} + +// ProviderConfigData is used to store the provider configurations for unified terraform provider +// across data sources onboarded to plugin framework. +type ProviderConfigData struct { + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { + attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + return attrs +} + +func (r ProviderConfigData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{} +} + +func (r ProviderConfigData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { + return types.ObjectValueMust( + r.Type(ctx).(basetypes.ObjectType).AttrTypes, + map[string]attr.Value{ + "workspace_id": r.WorkspaceID, + }, + ) +} + +func (r ProviderConfigData) Type(ctx context.Context) attr.Type { + return types.ObjectType{ + AttrTypes: map[string]attr.Type{ + "workspace_id": types.StringType, + }, + } +} From ed8dacdb8055f22e999eab57ac792fc90b388fcc Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Thu, 16 Oct 2025 21:59:00 +0530 Subject: [PATCH 02/27] - --- .../providers/pluginfw/tfschema/attribute_converter.go | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/internal/providers/pluginfw/tfschema/attribute_converter.go b/internal/providers/pluginfw/tfschema/attribute_converter.go index 7865b85aec..56efce9569 100644 --- a/internal/providers/pluginfw/tfschema/attribute_converter.go +++ b/internal/providers/pluginfw/tfschema/attribute_converter.go @@ -7,16 +7,12 @@ type Blockable interface { ToBlock() BlockBuilder } -// convertAttributesToBlocks converts all attributes implementing the Blockable interface to blocks except for "provider_config", -// returning a new NestedBlockObject with the converted attributes and the original blocks. 
+// convertAttributesToBlocks converts all attributes implementing the Blockable interface to blocks, returning +// a new NestedBlockObject with the converted attributes and the original blocks. func convertAttributesToBlocks(attributes map[string]AttributeBuilder, blocks map[string]BlockBuilder) NestedBlockObject { newAttributes := make(map[string]AttributeBuilder) newBlocks := make(map[string]BlockBuilder) for name, attr := range attributes { - if name == "provider_config" { - newAttributes[name] = attr - continue - } if lnab, ok := attr.(Blockable); ok { newBlocks[name] = lnab.ToBlock() } else { From e9f427578c86313c203e23e8d3e5c0d5a227251e Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Thu, 16 Oct 2025 22:36:52 +0530 Subject: [PATCH 03/27] tests --- .../tfschema/struct_to_schema_test.go | 60 +++++++++++++++++++ .../pluginfw/tfschema/unified_provider.go | 14 +++++ 2 files changed, 74 insertions(+) diff --git a/internal/providers/pluginfw/tfschema/struct_to_schema_test.go b/internal/providers/pluginfw/tfschema/struct_to_schema_test.go index dd06920e97..3bd316faf5 100644 --- a/internal/providers/pluginfw/tfschema/struct_to_schema_test.go +++ b/internal/providers/pluginfw/tfschema/struct_to_schema_test.go @@ -10,6 +10,8 @@ import ( "github.com/databricks/terraform-provider-databricks/common" tfcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common" "github.com/hashicorp/terraform-plugin-framework/attr" + datasource_schema "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + resource_schema "github.com/hashicorp/terraform-plugin-framework/resource/schema" "github.com/hashicorp/terraform-plugin-framework/tfsdk" "github.com/hashicorp/terraform-plugin-framework/types" "github.com/stretchr/testify/assert" @@ -42,6 +44,36 @@ func (TestIntTfSdk) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) return attrs } +type TestNamespaceResourceTfSdk struct { + Namespace +} + +func (a TestNamespaceResourceTfSdk) ApplySchemaCustomizations(s map[string]AttributeBuilder) map[string]AttributeBuilder { + s["provider_config"] = s["provider_config"].SetOptional() + return s +} + +func (a TestNamespaceResourceTfSdk) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "provider_config": reflect.TypeOf(ProviderConfig{}), + } +} + +type TestNamespaceDataSourceTfSdk struct { + Namespace +} + +func (a TestNamespaceDataSourceTfSdk) ApplySchemaCustomizations(s map[string]AttributeBuilder) map[string]AttributeBuilder { + s["provider_config"] = s["provider_config"].SetOptional() + return s +} + +func (a TestNamespaceDataSourceTfSdk) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + return map[string]reflect.Type{ + "provider_config": reflect.TypeOf(ProviderConfigData{}), + } +} + type TestComputedTfSdk struct { ComputedTag types.String `tfsdk:"computedtag"` MultipleTags types.String `tfsdk:"multipletags"` @@ -220,6 +252,18 @@ var tests = []struct { }), }, }, + { + "namespace resource conversion", + TestNamespaceResourceTfSdk{Namespace: Namespace{ProviderConfig: types.ObjectValueMust(ProviderConfig{}.Type(context.Background()).(types.ObjectType).AttrTypes, map[string]attr.Value{ + "workspace_id": types.StringValue("1234567890"), + })}}, + }, + { + "namespace data source conversion", + TestNamespaceDataSourceTfSdk{Namespace: Namespace{ProviderConfig: types.ObjectValueMust(ProviderConfigData{}.Type(context.Background()).(types.ObjectType).AttrTypes, map[string]attr.Value{ + 
"workspace_id": types.StringValue("1234567890"), + })}}, + }, } // StructToSchemaConversionTestCase runs a single test case to verify StructToSchema works for both data source and resource. @@ -263,6 +307,22 @@ func TestStructToSchemaOptionalVsRequiredField(t *testing.T) { assert.True(t, data_scm.Attributes["enabled"].IsRequired()) } +func TestStructToSchemaNamespace(t *testing.T) { + // Test that provider_config is an optional field. + scm := ResourceStructToSchema(context.Background(), TestNamespaceResourceTfSdk{}, nil) + assert.True(t, scm.Attributes["provider_config"].IsOptional()) + + data_scm := DataSourceStructToSchema(context.Background(), TestNamespaceDataSourceTfSdk{}, nil) + assert.True(t, data_scm.Attributes["provider_config"].IsOptional()) + + // Test that workspace_id is a required field. + scm = ResourceStructToSchema(context.Background(), TestNamespaceResourceTfSdk{}, nil) + assert.True(t, scm.Attributes["provider_config"].(resource_schema.SingleNestedAttribute).Attributes["workspace_id"].IsRequired()) + + data_scm = DataSourceStructToSchema(context.Background(), TestNamespaceDataSourceTfSdk{}, nil) + assert.True(t, data_scm.Attributes["provider_config"].(datasource_schema.SingleNestedAttribute).Attributes["workspace_id"].IsRequired()) +} + func testStructToSchemaPanics(t *testing.T, testStruct any, expectedError string) { defer func() { err := recover() diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go index 38a1a43087..495c92a841 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider.go +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -12,6 +12,10 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) +// Namespace is used to store the namespace for unified terraform provider +// across resources and data sourcesonboarded to plugin framework. +// Resources and data sources will use the underlying ProviderConfig and ProviderConfigData +// type respectively to store the provider configurations. type Namespace struct { ProviderConfig types.Object `tfsdk:"provider_config"` } @@ -22,6 +26,7 @@ type ProviderConfig struct { WorkspaceID types.String `tfsdk:"workspace_id"` } +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfig type. func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { attrs["workspace_id"] = attrs["workspace_id"].SetRequired() attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddPlanModifier( @@ -30,6 +35,8 @@ func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]AttributeBuil return attrs } +// workspaceIDPlanModifier is a plan modifier that requires replacement if the +// workspace_id changes from one non-empty value to another func workspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest, resp *stringplanmodifier.RequiresReplaceIfFuncResponse) { // Require replacement if workspace_id changes from one non-empty value to another oldValue := req.StateValue.ValueString() @@ -40,10 +47,12 @@ func workspaceIDPlanModifier(ctx context.Context, req planmodifier.StringRequest } } +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ProviderConfig. 
func (r ProviderConfig) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{} } +// ToObjectValue returns the object value for the resource func (r ProviderConfig) ToObjectValue(ctx context.Context) basetypes.ObjectValue { return types.ObjectValueMust( r.Type(ctx).(basetypes.ObjectType).AttrTypes, @@ -53,6 +62,7 @@ func (r ProviderConfig) ToObjectValue(ctx context.Context) basetypes.ObjectValue ) } +// Type returns the object type for the ProviderConfig type. func (r ProviderConfig) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ @@ -67,15 +77,18 @@ type ProviderConfigData struct { WorkspaceID types.String `tfsdk:"workspace_id"` } +// ApplySchemaCustomizations applies the schema customizations to the ProviderConfigData type. func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { attrs["workspace_id"] = attrs["workspace_id"].SetRequired() return attrs } +// GetComplexFieldTypes returns a map of the types of elements in complex fields in ProviderConfigData. func (r ProviderConfigData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { return map[string]reflect.Type{} } +// ToObjectValue returns the object value for the data source func (r ProviderConfigData) ToObjectValue(ctx context.Context) basetypes.ObjectValue { return types.ObjectValueMust( r.Type(ctx).(basetypes.ObjectType).AttrTypes, @@ -85,6 +98,7 @@ func (r ProviderConfigData) ToObjectValue(ctx context.Context) basetypes.ObjectV ) } +// Type returns the object type for the ProviderConfigData type. func (r ProviderConfigData) Type(ctx context.Context) attr.Type { return types.ObjectType{ AttrTypes: map[string]attr.Type{ From ac42d981cebc548fe8616a1d2e2567851d979295 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Fri, 17 Oct 2025 14:06:02 +0530 Subject: [PATCH 04/27] - --- NEXT_CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index a42bcb3d3b..53fa8d2ba5 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -13,3 +13,4 @@ ### Exporter ### Internal Changes +* Add provider_config support for plugin framework ([#5104](https://github.com/databricks/terraform-provider-databricks/pull/5104)) From 981a1492c3ec63bca4ad664d197715d10eadd92f Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Fri, 17 Oct 2025 14:09:56 +0530 Subject: [PATCH 05/27] - --- .../tfschema/unified_provider_test.go | 67 +++++++++++++++++++ 1 file changed, 67 insertions(+) create mode 100644 internal/providers/pluginfw/tfschema/unified_provider_test.go diff --git a/internal/providers/pluginfw/tfschema/unified_provider_test.go b/internal/providers/pluginfw/tfschema/unified_provider_test.go new file mode 100644 index 0000000000..3ab1b54da1 --- /dev/null +++ b/internal/providers/pluginfw/tfschema/unified_provider_test.go @@ -0,0 +1,67 @@ +package tfschema + +import ( + "context" + "testing" + + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/stretchr/testify/assert" +) + +func TestWorkspaceIDPlanModifier(t *testing.T) { + tests := []struct { + name string + stateValue string + planValue string + expectedRequiresReplace bool + }{ + { + name: "both non-empty and different - requires replace", + stateValue: "workspace-123", + planValue: "workspace-456", + 
expectedRequiresReplace: true, + }, + { + name: "both non-empty and same - no replace", + stateValue: "workspace-123", + planValue: "workspace-123", + expectedRequiresReplace: false, + }, + { + name: "old empty, new non-empty - no replace", + stateValue: "", + planValue: "workspace-123", + expectedRequiresReplace: false, + }, + { + name: "old non-empty, new empty - no replace", + stateValue: "workspace-123", + planValue: "", + expectedRequiresReplace: false, + }, + { + name: "both empty - no replace", + stateValue: "", + planValue: "", + expectedRequiresReplace: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + req := planmodifier.StringRequest{ + StateValue: types.StringValue(tt.stateValue), + PlanValue: types.StringValue(tt.planValue), + } + resp := &stringplanmodifier.RequiresReplaceIfFuncResponse{} + + workspaceIDPlanModifier(context.Background(), req, resp) + + assert.Equal(t, tt.expectedRequiresReplace, resp.RequiresReplace, + "RequiresReplace mismatch for state '%s' -> plan '%s'", + tt.stateValue, tt.planValue) + }) + } +} From 0bcf872b6e8d3a4b75fd6538976436dc4080365b Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Fri, 17 Oct 2025 20:23:21 +0530 Subject: [PATCH 06/27] comments --- internal/providers/pluginfw/tfschema/unified_provider.go | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go index 495c92a841..aac9ffe1a2 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider.go +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -13,7 +13,7 @@ import ( ) // Namespace is used to store the namespace for unified terraform provider -// across resources and data sourcesonboarded to plugin framework. +// across resources and data sources onboarded to plugin framework. // Resources and data sources will use the underlying ProviderConfig and ProviderConfigData // type respectively to store the provider configurations. type Namespace struct { @@ -80,6 +80,7 @@ type ProviderConfigData struct { // ApplySchemaCustomizations applies the schema customizations to the ProviderConfigData type. 
func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { attrs["workspace_id"] = attrs["workspace_id"].SetRequired() + attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) return attrs } From ed503f1780687267e1c07217fed6e2d94032d7a8 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Sun, 19 Oct 2025 19:09:32 +0530 Subject: [PATCH 07/27] [Feature] Unified Terraform Provider: Plugin framework resources SDKv2Compatible --- common/client.go | 71 ++-- .../pluginfw/products/sharing/data_share.go | 44 ++- .../pluginfw/products/sharing/data_shares.go | 58 ++- .../products/sharing/data_shares_acc_test.go | 113 +++--- .../products/sharing/resource_share.go | 145 ++++--- .../sharing/resource_share_acc_test.go | 359 ++++++------------ .../pluginfw/tfschema/unified_provider.go | 4 + 7 files changed, 399 insertions(+), 395 deletions(-) diff --git a/common/client.go b/common/client.go index 53400261be..a9919c7d21 100644 --- a/common/client.go +++ b/common/client.go @@ -77,12 +77,29 @@ type DatabricksClient struct { mu sync.Mutex } +// GetWorkspaceClientForUnifiedProviderWithDiagnostics returns the Databricks +// WorkspaceClient for workspace level resources or diagnostics if that fails +// for terraform provider, the provider can be configured at account level or workspace level. +// This implementation will be used by resources and data sources that are developed +// over plugin framework. +func (c *DatabricksClient) GetWorkspaceClientForUnifiedProviderWithDiagnostics( + ctx context.Context, workspaceID string, +) (*databricks.WorkspaceClient, diag.Diagnostics) { + w, err := c.GetWorkspaceClientForUnifiedProvider(ctx, workspaceID) + if err != nil { + return nil, diag.Diagnostics{diag.NewErrorDiagnostic("failed to get workspace client", err.Error())} + } + return w, nil +} + // GetWorkspaceClientForUnifiedProvider returns the Databricks // WorkspaceClient for workspace level resources or diagnostics if that fails // for terraform provider, the provider can be configured at account level or workspace level. +// This implementation will be used by resources and data sources that are developed +// over SDKv2. func (c *DatabricksClient) GetWorkspaceClientForUnifiedProvider( ctx context.Context, workspaceID string, -) (*databricks.WorkspaceClient, diag.Diagnostics) { +) (*databricks.WorkspaceClient, error) { // The provider can be configured at account level or workspace level. if c.Config.IsAccountClient() { return c.getWorkspaceClientForAccountConfiguredProvider(ctx, workspaceID) @@ -95,28 +112,24 @@ func (c *DatabricksClient) GetWorkspaceClientForUnifiedProvider( // at account level. func (c *DatabricksClient) getWorkspaceClientForAccountConfiguredProvider( ctx context.Context, workspaceID string, -) (*databricks.WorkspaceClient, diag.Diagnostics) { +) (*databricks.WorkspaceClient, error) { // Workspace ID must be set in a workspace level resource if // the provider is configured at account level. // TODO: Link to the documentation once migration guide is published if workspaceID == "" { - return nil, diag.Diagnostics{diag.NewErrorDiagnostic( - "workspace_id is not set", - "please set the workspace_id in the provider_config")} + return nil, fmt.Errorf("workspace_id is not set, please set the workspace_id in the provider_config") } // Parse the workspace ID to int. 
- workspaceIDInt, diags := parseWorkspaceID(workspaceID) - if diags.HasError() { - return nil, diags + workspaceIDInt, err := parseWorkspaceID(workspaceID) + if err != nil { + return nil, err } // Get the workspace client for the workspace ID. w, err := c.WorkspaceClientForWorkspace(ctx, workspaceIDInt) if err != nil { - diags.AddError(fmt.Sprintf( - "failed to get workspace client with workspace_id %d", workspaceIDInt), err.Error()) - return nil, diags + return nil, fmt.Errorf("failed to get workspace client with workspace_id %d: %w", workspaceIDInt, err) } return w, nil } @@ -125,44 +138,40 @@ func (c *DatabricksClient) getWorkspaceClientForAccountConfiguredProvider( // the workspace ID specified in the resource when the provider is configured at workspace level. func (c *DatabricksClient) getWorkspaceClientForWorkspaceConfiguredProvider( ctx context.Context, workspaceID string, -) (*databricks.WorkspaceClient, diag.Diagnostics) { +) (*databricks.WorkspaceClient, error) { // Provider is configured at workspace level and we get the // workspace client from the provider. if workspaceID == "" { - return c.GetWorkspaceClient() + return c.WorkspaceClient() } - workspaceIDInt, diags := parseWorkspaceID(workspaceID) - if diags.HasError() { - return nil, diags + workspaceIDInt, err := parseWorkspaceID(workspaceID) + if err != nil { + return nil, err } // Check if the workspace ID specified in the resource matches // the workspace ID of the provider configured workspace client. - w, clientDiags := c.GetWorkspaceClient() - diags.Append(clientDiags...) - if diags.HasError() { - return nil, diags + w, err := c.WorkspaceClient() + if err != nil { + return nil, err } - err := c.validateWorkspaceIDFromProvider(ctx, workspaceIDInt, w) + + err = c.validateWorkspaceIDFromProvider(ctx, workspaceIDInt, w) if err != nil { - diags.AddError("failed to validate workspace_id", err.Error()) - return nil, diags + return nil, fmt.Errorf("failed to validate workspace_id: %w", err) } // The provider is configured at the workspace level and the // workspace ID matches - return w, diags + return w, nil } // parseWorkspaceID parses the workspace ID from string to int64. -func parseWorkspaceID(workspaceID string) (int64, diag.Diagnostics) { +func parseWorkspaceID(workspaceID string) (int64, error) { workspaceIDInt, err := strconv.ParseInt(workspaceID, 10, 64) if err != nil { - return 0, diag.Diagnostics{ - diag.NewErrorDiagnostic( - "failed to parse workspace_id. please check if the workspace_id in provider_config is a valid integer", err.Error(), - ), - } + return 0, fmt.Errorf("failed to parse workspace_id, please check if the workspace_id in provider_config is a valid integer: %w", err) + } return workspaceIDInt, nil } @@ -182,7 +191,7 @@ func (c *DatabricksClient) validateWorkspaceIDFromProvider(ctx context.Context, if c.cachedWorkspaceID != workspaceID { return fmt.Errorf("workspace_id mismatch: provider is configured for workspace %d but got %d in provider_config. 
"+ - "Please check the workspace_id provided in provider_config", + "please check the workspace_id provided in provider_config", c.cachedWorkspaceID, workspaceID) } return nil diff --git a/internal/providers/pluginfw/products/sharing/data_share.go b/internal/providers/pluginfw/products/sharing/data_share.go index 7395102aaf..248d5c28fa 100644 --- a/internal/providers/pluginfw/products/sharing/data_share.go +++ b/internal/providers/pluginfw/products/sharing/data_share.go @@ -2,6 +2,7 @@ package sharing import ( "context" + "reflect" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/sharing" @@ -13,6 +14,8 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/service/sharing_tf" "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) const dataSourceNameShare = "share" @@ -27,12 +30,29 @@ type ShareDataSource struct { Client *common.DatabricksClient } +type ShareData struct { + sharing_tf.ShareInfo + ProviderConfigData types.Object `tfsdk:"provider_config"` +} + +func (s ShareData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + types := s.ShareInfo.GetComplexFieldTypes(ctx) + types["provider_config"] = reflect.TypeOf(tfschema.ProviderConfigData{}) + return types +} + +func (s ShareData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + s.ShareInfo.ApplySchemaCustomizations(attrs) + attrs["provider_config"] = attrs["provider_config"].SetOptional() + return attrs +} + func (d *ShareDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceNameShare) + resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShare) } func (d *ShareDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, sharing_tf.ShareInfo{}, nil) + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, ShareData{}, nil) resp.Schema = schema.Schema{ Attributes: attrs, Blocks: blocks, @@ -47,14 +67,26 @@ func (d *ShareDataSource) Configure(_ context.Context, req datasource.ConfigureR func (d *ShareDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShare) - w, diags := d.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + + var config ShareData + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) if resp.Diagnostics.HasError() { return } - var config sharing_tf.ShareInfo - diags = req.Config.Get(ctx, &config) + var workspaceID string + if !config.ProviderConfigData.IsNull() { + var namespace tfschema.ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, diags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return diff --git a/internal/providers/pluginfw/products/sharing/data_shares.go b/internal/providers/pluginfw/products/sharing/data_shares.go index 01fa3b2157..6051bf632b 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares.go +++ b/internal/providers/pluginfw/products/sharing/data_shares.go @@ -13,34 +13,38 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) const dataSourceNameShares = "shares" type SharesList struct { - Shares types.List `tfsdk:"shares"` + Shares types.List `tfsdk:"shares"` + ProviderConfigData types.Object `tfsdk:"provider_config"` } -func (SharesList) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["shares"] = attrs["shares"].SetComputed().SetOptional() - - return attrs -} - -func (SharesList) GetComplexFieldTypes(context.Context) map[string]reflect.Type { +func (s SharesList) GetComplexFieldTypes(context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "shares": reflect.TypeOf(types.String{}), + "shares": reflect.TypeOf(types.String{}), + "provider_config": reflect.TypeOf(tfschema.ProviderConfigData{}), } } -func (SharesList) ToObjectType(ctx context.Context) types.ObjectType { +func (s SharesList) ToObjectType(ctx context.Context) types.ObjectType { return types.ObjectType{ AttrTypes: map[string]attr.Type{ - "shares": types.ListType{ElemType: types.StringType}, + "shares": types.ListType{ElemType: types.StringType}, + "provider_config": tfschema.ProviderConfigData{}.Type(ctx), }, } } +func (s SharesList) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["shares"] = attrs["shares"].SetComputed().SetOptional() + attrs["provider_config"] = attrs["provider_config"].SetOptional() + return attrs +} + func DataSourceShares() datasource.DataSource { return &SharesDataSource{} } @@ -52,7 +56,7 @@ type SharesDataSource struct { } func (d *SharesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceNameShares) + resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShares) } func (d *SharesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { @@ -71,8 +75,28 @@ func (d *SharesDataSource) Configure(_ context.Context, req datasource.Configure func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShares) - w, diags := d.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + + var config SharesList + resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) + if resp.Diagnostics.HasError() { + return + } + + var workspaceID string + if !config.ProviderConfigData.IsNull() { + var namespace tfschema.ProviderConfigData + resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) 
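+		// Abort if the provider_config object could not be decoded.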
+ if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, clientDiags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + + resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { return } @@ -88,5 +112,9 @@ func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, shareNames[i] = types.StringValue(share.Name) } - resp.Diagnostics.Append(resp.State.Set(ctx, SharesList{Shares: types.ListValueMust(types.StringType, shareNames)})...) + newState := SharesList{ + Shares: types.ListValueMust(types.StringType, shareNames), + ProviderConfigData: config.ProviderConfigData, + } + resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) } diff --git a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go index c6237f0095..2ccf43434c 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go @@ -19,72 +19,85 @@ func checkSharesDataSourcePopulated(t *testing.T) func(s *terraform.State) error return nil } } -func TestUcAccDataSourceShares(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: ` - resource "databricks_catalog" "sandbox" { - name = "sandbox{var.RANDOM}" - comment = "this catalog is managed by terraform" - properties = { - purpose = "testing" - } + +const sharesDataTemplate = ` + resource "databricks_catalog" "sandbox" { + name = "sandbox{var.STICKY_RANDOM}" + comment = "this catalog is managed by terraform" + properties = { + purpose = "testing" } + } - resource "databricks_schema" "things" { - catalog_name = databricks_catalog.sandbox.id - name = "things{var.RANDOM}" - comment = "this database is managed by terraform" - properties = { - kind = "various" - } + resource "databricks_schema" "things" { + catalog_name = databricks_catalog.sandbox.id + name = "things{var.STICKY_RANDOM}" + comment = "this database is managed by terraform" + properties = { + kind = "various" } + } - resource "databricks_sql_table" "mytable" { - catalog_name = databricks_catalog.sandbox.id - schema_name = databricks_schema.things.name - name = "bar" - table_type = "MANAGED" - warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + resource "databricks_sql_table" "mytable" { + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.things.name + name = "bar" + table_type = "MANAGED" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" - column { - name = "id" - type = "int" - } + column { + name = "id" + type = "int" } + } - resource "databricks_sql_table" "mytable_2" { - catalog_name = databricks_catalog.sandbox.id - schema_name = databricks_schema.things.name - name = "bar_2" - table_type = "MANAGED" - warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + resource "databricks_sql_table" "mytable_2" { + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.things.name + name = "bar_2" + table_type = "MANAGED" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" - column { - name = "id" - type = "int" - } + column { + name = "id" + type = "int" } + } - resource "databricks_share" "myshare" { - name = "{var.RANDOM}-terraform-delta-share" - object { - name = databricks_sql_table.mytable.id - comment = "c" - data_object_type = "TABLE" - history_data_sharing_status = "ENABLED" - } - object { - name = databricks_sql_table.mytable_2.id - comment = "c" - 
data_object_type = "TABLE" - history_data_sharing_status = "ENABLED" - } + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + object { + name = databricks_sql_table.mytable.id + comment = "c" + data_object_type = "TABLE" + history_data_sharing_status = "ENABLED" + } + object { + name = databricks_sql_table.mytable_2.id + comment = "c" + data_object_type = "TABLE" + history_data_sharing_status = "ENABLED" } + } +` +func TestUcAccDataSourceShares(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: sharesDataTemplate + ` data "databricks_shares" "this" { depends_on = [databricks_share.myshare] } `, Check: checkSharesDataSourcePopulated(t), + }, acceptance.Step{ + Template: sharesDataTemplate + ` + data "databricks_shares" "this" { + depends_on = [databricks_share.myshare] + provider_config = { + workspace_id = "{env.THIS_WORKSPACE_ID}" + } + } + `, + Check: checkSharesDataSourcePopulated(t), }) } diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 672e758bed..63e1a14a8c 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -20,6 +20,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) const resourceName = "share" @@ -32,13 +33,16 @@ func ResourceShare() resource.Resource { type ShareInfoExtended struct { sharing_tf.ShareInfo_SdkV2 + tfschema.Namespace ID types.String `tfsdk:"id"` // Adding ID field to stay compatible with SDKv2 } var _ pluginfwcommon.ComplexFieldTypeProvider = ShareInfoExtended{} func (s ShareInfoExtended) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return s.ShareInfo_SdkV2.GetComplexFieldTypes(ctx) + types := s.ShareInfo_SdkV2.GetComplexFieldTypes(ctx) + types["provider_config"] = reflect.TypeOf(tfschema.ProviderConfig{}) + return types } func matchOrder[T any, K comparable](target, reference []T, keyFunc func(T) K) { @@ -142,7 +146,7 @@ type ShareResource struct { } func (r *ShareResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) + resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName) } func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { @@ -159,6 +163,7 @@ func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, c.SetRequired("object", "partition", "value", "name") c.SetComputed("id") + c.SetOptional("provider_config") return c }) @@ -177,11 +182,7 @@ func (d *ShareResource) Configure(ctx context.Context, req resource.ConfigureReq func (r *ShareResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } + var plan ShareInfoExtended resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) 
if resp.Diagnostics.HasError() { @@ -199,6 +200,25 @@ func (r *ShareResource) Create(ctx context.Context, req resource.CreateRequest, if resp.Diagnostics.HasError() { return } + + var workspaceID string + if !plan.ProviderConfig.IsNull() { + var namespace tfschema.ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(clientDiags...) + if resp.Diagnostics.HasError() { + return + } + shareInfo, err := w.Shares.Create(ctx, createShare) if err != nil { resp.Diagnostics.AddError("failed to create share", err.Error()) @@ -252,15 +272,27 @@ func (r *ShareResource) Read(ctx context.Context, req resource.ReadRequest, resp return } - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var getShareRequest sharing.GetShareRequest + getShareRequest.IncludeSharedData = true + resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("name"), &getShareRequest.Name)...) if resp.Diagnostics.HasError() { return } - var getShareRequest sharing.GetShareRequest - getShareRequest.IncludeSharedData = true - resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("name"), &getShareRequest.Name)...) + var workspaceID string + if !existingState.ProviderConfig.IsNull() { + var namespace tfschema.ProviderConfig + resp.Diagnostics.Append(existingState.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { return } @@ -302,12 +334,6 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, return } - client, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } - var plan ShareInfoExtended resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...) if resp.Diagnostics.HasError() { @@ -324,7 +350,24 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, getShareRequest.Name = state.Name.ValueString() getShareRequest.IncludeSharedData = true - currentShareInfo, err := client.Shares.Get(ctx, getShareRequest) + var workspaceID string + if !plan.ProviderConfig.IsNull() { + var namespace tfschema.ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(clientDiags...) 
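+	// Abort if the workspace client could not be resolved (for example, an invalid or
+	// mismatched workspace_id in provider_config).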
+ if resp.Diagnostics.HasError() { + return + } + currentShareInfo, err := w.Shares.Get(ctx, getShareRequest) if err != nil { resp.Diagnostics.AddError("failed to get current share info", err.Error()) return @@ -337,7 +380,7 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, // if owner has changed, update the share owner if !plan.Owner.IsNull() { - updatedShareInfo, err := client.Shares.Update(ctx, sharing.UpdateShare{ + updatedShareInfo, err := w.Shares.Update(ctx, sharing.UpdateShare{ Name: state.Name.ValueString(), Owner: plan.Owner.ValueString(), }) @@ -362,12 +405,12 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, if !plan.Comment.IsNull() { update.Comment = plan.Comment.ValueString() } - upToDateShareInfo, err = client.Shares.Update(ctx, update) + upToDateShareInfo, err = w.Shares.Update(ctx, update) if err != nil { resp.Diagnostics.AddError("failed to update share", err.Error()) - rollbackShareInfo, rollbackErr := client.Shares.Update(ctx, sharing.UpdateShare{ + rollbackShareInfo, rollbackErr := w.Shares.Update(ctx, sharing.UpdateShare{ Name: currentShareInfo.Name, Owner: currentShareInfo.Owner, }) @@ -402,8 +445,8 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, func (r *ShareResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var state ShareInfoExtended + resp.Diagnostics.Append(req.State.Get(ctx, &state)...) if resp.Diagnostics.HasError() { return } @@ -413,6 +456,24 @@ func (r *ShareResource) Delete(ctx context.Context, req resource.DeleteRequest, if resp.Diagnostics.HasError() { return } + + var workspaceID string + if !state.ProviderConfig.IsNull() { + var namespace tfschema.ProviderConfig + resp.Diagnostics.Append(state.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + if resp.Diagnostics.HasError() { + return + } + workspaceID = namespace.WorkspaceID.ValueString() + } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(clientDiags...) + if resp.Diagnostics.HasError() { + return + } err := w.Shares.DeleteByName(ctx, deleteShareRequest.Name.ValueString()) if err != nil && !apierr.IsMissing(err) { resp.Diagnostics.AddError("failed to delete share", err.Error()) @@ -445,33 +506,17 @@ func (effectiveFieldsActionRead) objectLevel(ctx context.Context, state *sharing state.SyncFieldsDuringRead(ctx, plan) } -// syncEffectiveFields syncs the effective fields between existingState and newState -// and returns the newState -// -// existingState: infrastructure values that are recorded in the existing terraform state. -// newState: latest infrastructure values that are returned by the CRUD API calls. -// -// HCL config is compared with this newState to determine what changes are to be made -// to the infrastructure and then the newState values are recorded in the terraform state. -// Hence we ignore the values in existingState which are not present in newState. 
-func (r *ShareResource) syncEffectiveFields(ctx context.Context, existingState, newState ShareInfoExtended, mode effectiveFieldsAction) (ShareInfoExtended, diag.Diagnostics) { +func (r *ShareResource) syncEffectiveFields(ctx context.Context, plan, state ShareInfoExtended, mode effectiveFieldsAction) (ShareInfoExtended, diag.Diagnostics) { var d diag.Diagnostics - mode.resourceLevel(ctx, &newState, existingState.ShareInfo_SdkV2) - existingStateObjects, _ := existingState.GetObjects(ctx) - newStateObjects, _ := newState.GetObjects(ctx) + mode.resourceLevel(ctx, &state, plan.ShareInfo_SdkV2) + planObjects, _ := plan.GetObjects(ctx) + stateObjects, _ := state.GetObjects(ctx) finalObjects := []sharing_tf.SharedDataObject_SdkV2{} - for i := range newStateObjects { - // For each object in the new state, we check if it exists in the existing state - // and if it does, we sync the effective fields. - // If it does not exist, we keep the new state object as is. - for j := range existingStateObjects { - if newStateObjects[i].Name == existingStateObjects[j].Name { - mode.objectLevel(ctx, &newStateObjects[i], existingStateObjects[j]) - break - } - } - finalObjects = append(finalObjects, newStateObjects[i]) + for i := range stateObjects { + mode.objectLevel(ctx, &stateObjects[i], planObjects[i]) + finalObjects = append(finalObjects, stateObjects[i]) } - newState.SetObjects(ctx, finalObjects) - return newState, d + state.SetObjects(ctx, finalObjects) + state.ProviderConfig = plan.ProviderConfig // Preserve provider_config from plan + return state, d } diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 1a5054c585..85451516e7 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -1,19 +1,14 @@ package sharing_test import ( - "context" "fmt" - "maps" + "regexp" "testing" - "github.com/databricks/databricks-sdk-go" - "github.com/databricks/databricks-sdk-go/service/sharing" "github.com/databricks/terraform-provider-databricks/internal/acceptance" - "github.com/databricks/terraform-provider-databricks/internal/providers" - "github.com/hashicorp/terraform-plugin-go/tfprotov6" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/terraform" - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) const preTestTemplate = ` @@ -200,6 +195,9 @@ func TestUcAccUpdateShareAddObject(t *testing.T) { data_object_type = "TABLE" history_data_sharing_status = "ENABLED" } + provider_config = { + workspace_id = "{env.THIS_WORKSPACE_ID}" + } }`, }) } @@ -240,188 +238,6 @@ func TestUcAccUpdateShareReorderObject(t *testing.T) { }) } -// TestUcAccUpdateShareNoChanges tests that updating a share with no actual changes doesn't cause issues -func TestUcAccUpdateShareNoChanges(t *testing.T) { - shareConfig := preTestTemplateSchema + - `resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - owner = "account users" - object { - name = databricks_schema.schema1.id - data_object_type = "SCHEMA" - } - }` - - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: shareConfig, - }, acceptance.Step{ - PlanOnly: true, - Template: shareConfig, // Same config - should not trigger any updates - }) -} - -// 
TestUcAccUpdateShareComplexObjectChanges tests complex scenarios with multiple object updates -func TestUcAccUpdateShareComplexObjectChanges(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + - `resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - object { - name = databricks_schema.schema1.id - comment = "original comment" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "second schema" - data_object_type = "SCHEMA" - } - }`, - }, acceptance.Step{ - // Remove one object, add another, and update comment on existing - Template: preTestTemplateSchema + - `resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - object { - name = databricks_schema.schema1.id - comment = "updated comment" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema3.id - comment = "third schema" - data_object_type = "SCHEMA" - } - }`, - }) -} - -// TestUcAccUpdateShareRemoveAllObjects tests removing all objects from a share -func TestUcAccUpdateShareRemoveAllObjects(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + - `resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - owner = "account users" - object { - name = databricks_schema.schema1.id - comment = "to be removed" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "also to be removed" - data_object_type = "SCHEMA" - } - }`, - }, acceptance.Step{ - Template: preTestTemplateSchema + - `resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - owner = "account users" - }`, - }) -} - -// TestUcAccShareMigrationFromSDKv2 tests the transition from sdkv2 to plugin framework. -// This test verifies that existing state created by SDK v2 implementation can be -// successfully managed by the plugin framework implementation without any changes. 
-func TestUcAccShareMigrationFromSDKv2(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, - // Step 1: Create share using SDK v2 implementation - acceptance.Step{ - ProtoV6ProviderFactories: map[string]func() (tfprotov6.ProviderServer, error){ - "databricks": func() (tfprotov6.ProviderServer, error) { - sdkv2Provider, pluginfwProvider := acceptance.ProvidersWithResourceFallbacks([]string{"databricks_share"}) - return providers.GetProviderServer(context.Background(), providers.WithSdkV2Provider(sdkv2Provider), providers.WithPluginFrameworkProvider(pluginfwProvider)) - }, - }, - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-migration-share" - object { - name = databricks_schema.schema1.id - comment = "Shared schema object for migration test" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "Second shared schema object" - data_object_type = "SCHEMA" - } - }`, - }, - // Step 2: Update the share using plugin framework implementation (default) - // This verifies no changes are needed when switching implementations - acceptance.Step{ - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-migration-share" - object { - name = databricks_schema.schema1.id - comment = "Updated comment for schema object after migration" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "Second shared schema object" - data_object_type = "SCHEMA" - } - }`, - }, - ) -} - -// TestUcAccShareMigrationFromPluginFramework tests the transition from plugin framework to sdkv2. -// This test verifies that existing state created by plugin framework implementation can be -// successfully managed by the SDK v2 implementation without any changes. 
-func TestUcAccShareMigrationFromPluginFramework(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, - // Step 1: Create share using plugin framework implementation - acceptance.Step{ - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-migration-share-rollback" - owner = "account users" - object { - name = databricks_schema.schema1.id - comment = "Shared schema object for migration test" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "Second shared schema object" - data_object_type = "SCHEMA" - } - }`, - }, - // Step 2: Update the share using SDK v2 (default) - // This verifies no changes are needed when switching implementations - acceptance.Step{ - ProtoV6ProviderFactories: map[string]func() (tfprotov6.ProviderServer, error){ - "databricks": func() (tfprotov6.ProviderServer, error) { - sdkv2Provider, pluginfwProvider := acceptance.ProvidersWithResourceFallbacks([]string{"databricks_share"}) - return providers.GetProviderServer(context.Background(), providers.WithSdkV2Provider(sdkv2Provider), providers.WithPluginFrameworkProvider(pluginfwProvider)) - }, - }, - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-migration-share-rollback" - owner = "account users" - object { - name = databricks_schema.schema1.id - comment = "Shared schema object for migration test" - data_object_type = "SCHEMA" - } - object { - name = databricks_schema.schema2.id - comment = "Second shared schema object" - data_object_type = "SCHEMA" - } - }`, - }, - ) -} func shareUpdateWithName(name string) string { return fmt.Sprintf(`resource "databricks_share" "myshare" { name = "%s" @@ -542,70 +358,127 @@ func TestUcAccShareReorderObject(t *testing.T) { }) } -func TestUcAccUpdateShareOutsideTerraform(t *testing.T) { - shareName := "" - sharedObjectNameToAdd := "" - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share-outside-terraform" +func shareTemplate(provider_config string) string { + return fmt.Sprintf(` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-share-config" + %s object { name = databricks_schema.schema1.id data_object_type = "SCHEMA" } - object { - name = databricks_schema.schema3.id - data_object_type = "SCHEMA" + } +`, provider_config) +} + +func TestAccShare_ProviderConfig_Invalid(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "invalid" } - }`, - Check: func(s *terraform.State) error { - resources := s.RootModule().Resources - share := resources["databricks_share.myshare"] - if share == nil { - return fmt.Errorf("expected to find databricks_share.myshare in resources keys: %v", maps.Keys(resources)) + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), + }) +} + +func TestAccJobCluster_ProviderConfig_Mismatched(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "123" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), + }) +} + +func 
TestAccJobCluster_ProviderConfig_Required(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { } - shareName = share.Primary.Attributes["name"] - assert.NotEmpty(t, shareName) + `), + ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), + }) +} - schema := resources["databricks_schema.schema2"] - if schema == nil { - return fmt.Errorf("expected to find databricks_schema.schema2 in resources keys: %v", maps.Keys(resources)) +func TestAccJobCluster_ProviderConfig_EmptyID(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "" } - sharedObjectNameToAdd = schema.Primary.Attributes["id"] - assert.NotEmpty(t, sharedObjectNameToAdd) - return nil - }, + `), + ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), + }) +} + +func TestAccJobCluster_ProviderConfig_NotProvided(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }) +} + +func TestAccJobCluster_ProviderConfig_Match(t *testing.T) { + // acceptance.LoadWorkspaceEnv(t) + // get workspace id here from workspace + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), }, acceptance.Step{ - PreConfig: func() { - w, err := databricks.NewWorkspaceClient(&databricks.Config{}) - require.NoError(t, err) - - // Add object to share outside terraform - _, err = w.Shares.Update(context.Background(), sharing.UpdateShare{ - Name: shareName, - Updates: []sharing.SharedDataObjectUpdate{ - { - Action: sharing.SharedDataObjectUpdateActionAdd, - DataObject: &sharing.SharedDataObject{ - Name: sharedObjectNameToAdd, - DataObjectType: "SCHEMA", - }, - }, - }, - }) - require.NoError(t, err) + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "4220866301720038" + } + `), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), + }, }, - Template: preTestTemplateSchema + ` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share-outside-terraform" - object { - name = databricks_schema.schema1.id - data_object_type = "SCHEMA" + }) +} + +func TestAccJobCluster_ProviderConfig_Recreate(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "4220866301720038" } - object { - name = databricks_schema.schema3.id - data_object_type = "SCHEMA" + `), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "123" + } + `), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionReplace), + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + ExpectError: regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), + }) +} + +func TestAccJobCluster_ProviderConfig_Remove(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + 
shareTemplate(""), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "4220866301720038" } - }`, + `), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), + }, + }, }) } diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go index aac9ffe1a2..765fc58656 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider.go +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -20,6 +20,10 @@ type Namespace struct { ProviderConfig types.Object `tfsdk:"provider_config"` } +type Namespace_SdkV2 struct { + ProviderConfig types.List `tfsdk:"provider_config"` +} + // ProviderConfig is used to store the provider configurations for unified terraform provider // across resources onboarded to plugin framework. type ProviderConfig struct { From 06e38239272166bf697fab4952b74248babfa503 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Sun, 19 Oct 2025 19:13:20 +0530 Subject: [PATCH 08/27] - --- .../products/sharing/data_shares_acc_test.go | 113 +++--- .../sharing/resource_share_acc_test.go | 359 ++++++++++++------ 2 files changed, 293 insertions(+), 179 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go index 2ccf43434c..c6237f0095 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go @@ -19,85 +19,72 @@ func checkSharesDataSourcePopulated(t *testing.T) func(s *terraform.State) error return nil } } - -const sharesDataTemplate = ` - resource "databricks_catalog" "sandbox" { - name = "sandbox{var.STICKY_RANDOM}" - comment = "this catalog is managed by terraform" - properties = { - purpose = "testing" +func TestUcAccDataSourceShares(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: ` + resource "databricks_catalog" "sandbox" { + name = "sandbox{var.RANDOM}" + comment = "this catalog is managed by terraform" + properties = { + purpose = "testing" + } } - } - resource "databricks_schema" "things" { - catalog_name = databricks_catalog.sandbox.id - name = "things{var.STICKY_RANDOM}" - comment = "this database is managed by terraform" - properties = { - kind = "various" + resource "databricks_schema" "things" { + catalog_name = databricks_catalog.sandbox.id + name = "things{var.RANDOM}" + comment = "this database is managed by terraform" + properties = { + kind = "various" + } } - } - resource "databricks_sql_table" "mytable" { - catalog_name = databricks_catalog.sandbox.id - schema_name = databricks_schema.things.name - name = "bar" - table_type = "MANAGED" - warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" + resource "databricks_sql_table" "mytable" { + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.things.name + name = "bar" + table_type = "MANAGED" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" - column { - name = "id" - type = "int" + column { + name = "id" + type = "int" + } } - } - resource "databricks_sql_table" "mytable_2" { - catalog_name = databricks_catalog.sandbox.id - schema_name = databricks_schema.things.name - name = "bar_2" - table_type = "MANAGED" - warehouse_id = 
"{env.TEST_DEFAULT_WAREHOUSE_ID}" + resource "databricks_sql_table" "mytable_2" { + catalog_name = databricks_catalog.sandbox.id + schema_name = databricks_schema.things.name + name = "bar_2" + table_type = "MANAGED" + warehouse_id = "{env.TEST_DEFAULT_WAREHOUSE_ID}" - column { - name = "id" - type = "int" + column { + name = "id" + type = "int" + } } - } - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-terraform-delta-share" - object { - name = databricks_sql_table.mytable.id - comment = "c" - data_object_type = "TABLE" - history_data_sharing_status = "ENABLED" - } - object { - name = databricks_sql_table.mytable_2.id - comment = "c" - data_object_type = "TABLE" - history_data_sharing_status = "ENABLED" + resource "databricks_share" "myshare" { + name = "{var.RANDOM}-terraform-delta-share" + object { + name = databricks_sql_table.mytable.id + comment = "c" + data_object_type = "TABLE" + history_data_sharing_status = "ENABLED" + } + object { + name = databricks_sql_table.mytable_2.id + comment = "c" + data_object_type = "TABLE" + history_data_sharing_status = "ENABLED" + } } - } -` -func TestUcAccDataSourceShares(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: sharesDataTemplate + ` data "databricks_shares" "this" { depends_on = [databricks_share.myshare] } `, Check: checkSharesDataSourcePopulated(t), - }, acceptance.Step{ - Template: sharesDataTemplate + ` - data "databricks_shares" "this" { - depends_on = [databricks_share.myshare] - provider_config = { - workspace_id = "{env.THIS_WORKSPACE_ID}" - } - } - `, - Check: checkSharesDataSourcePopulated(t), }) } diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 85451516e7..1a5054c585 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -1,14 +1,19 @@ package sharing_test import ( + "context" "fmt" - "regexp" + "maps" "testing" + "github.com/databricks/databricks-sdk-go" + "github.com/databricks/databricks-sdk-go/service/sharing" "github.com/databricks/terraform-provider-databricks/internal/acceptance" - "github.com/hashicorp/terraform-plugin-testing/helper/resource" - "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/databricks/terraform-provider-databricks/internal/providers" + "github.com/hashicorp/terraform-plugin-go/tfprotov6" "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) const preTestTemplate = ` @@ -195,9 +200,6 @@ func TestUcAccUpdateShareAddObject(t *testing.T) { data_object_type = "TABLE" history_data_sharing_status = "ENABLED" } - provider_config = { - workspace_id = "{env.THIS_WORKSPACE_ID}" - } }`, }) } @@ -238,6 +240,188 @@ func TestUcAccUpdateShareReorderObject(t *testing.T) { }) } +// TestUcAccUpdateShareNoChanges tests that updating a share with no actual changes doesn't cause issues +func TestUcAccUpdateShareNoChanges(t *testing.T) { + shareConfig := preTestTemplateSchema + + `resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + owner = "account users" + object { + name = databricks_schema.schema1.id + data_object_type = "SCHEMA" + } + }` + + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: shareConfig, + }, acceptance.Step{ + PlanOnly: true, + Template: shareConfig, // Same 
config - should not trigger any updates + }) +} + +// TestUcAccUpdateShareComplexObjectChanges tests complex scenarios with multiple object updates +func TestUcAccUpdateShareComplexObjectChanges(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + + `resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + object { + name = databricks_schema.schema1.id + comment = "original comment" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "second schema" + data_object_type = "SCHEMA" + } + }`, + }, acceptance.Step{ + // Remove one object, add another, and update comment on existing + Template: preTestTemplateSchema + + `resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + object { + name = databricks_schema.schema1.id + comment = "updated comment" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema3.id + comment = "third schema" + data_object_type = "SCHEMA" + } + }`, + }) +} + +// TestUcAccUpdateShareRemoveAllObjects tests removing all objects from a share +func TestUcAccUpdateShareRemoveAllObjects(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + + `resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + owner = "account users" + object { + name = databricks_schema.schema1.id + comment = "to be removed" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "also to be removed" + data_object_type = "SCHEMA" + } + }`, + }, acceptance.Step{ + Template: preTestTemplateSchema + + `resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share" + owner = "account users" + }`, + }) +} + +// TestUcAccShareMigrationFromSDKv2 tests the transition from sdkv2 to plugin framework. +// This test verifies that existing state created by SDK v2 implementation can be +// successfully managed by the plugin framework implementation without any changes. 
+func TestUcAccShareMigrationFromSDKv2(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, + // Step 1: Create share using SDK v2 implementation + acceptance.Step{ + ProtoV6ProviderFactories: map[string]func() (tfprotov6.ProviderServer, error){ + "databricks": func() (tfprotov6.ProviderServer, error) { + sdkv2Provider, pluginfwProvider := acceptance.ProvidersWithResourceFallbacks([]string{"databricks_share"}) + return providers.GetProviderServer(context.Background(), providers.WithSdkV2Provider(sdkv2Provider), providers.WithPluginFrameworkProvider(pluginfwProvider)) + }, + }, + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-migration-share" + object { + name = databricks_schema.schema1.id + comment = "Shared schema object for migration test" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "Second shared schema object" + data_object_type = "SCHEMA" + } + }`, + }, + // Step 2: Update the share using plugin framework implementation (default) + // This verifies no changes are needed when switching implementations + acceptance.Step{ + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-migration-share" + object { + name = databricks_schema.schema1.id + comment = "Updated comment for schema object after migration" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "Second shared schema object" + data_object_type = "SCHEMA" + } + }`, + }, + ) +} + +// TestUcAccShareMigrationFromPluginFramework tests the transition from plugin framework to sdkv2. +// This test verifies that existing state created by plugin framework implementation can be +// successfully managed by the SDK v2 implementation without any changes. 
+func TestUcAccShareMigrationFromPluginFramework(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, + // Step 1: Create share using plugin framework implementation + acceptance.Step{ + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-migration-share-rollback" + owner = "account users" + object { + name = databricks_schema.schema1.id + comment = "Shared schema object for migration test" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "Second shared schema object" + data_object_type = "SCHEMA" + } + }`, + }, + // Step 2: Update the share using SDK v2 (default) + // This verifies no changes are needed when switching implementations + acceptance.Step{ + ProtoV6ProviderFactories: map[string]func() (tfprotov6.ProviderServer, error){ + "databricks": func() (tfprotov6.ProviderServer, error) { + sdkv2Provider, pluginfwProvider := acceptance.ProvidersWithResourceFallbacks([]string{"databricks_share"}) + return providers.GetProviderServer(context.Background(), providers.WithSdkV2Provider(sdkv2Provider), providers.WithPluginFrameworkProvider(pluginfwProvider)) + }, + }, + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-migration-share-rollback" + owner = "account users" + object { + name = databricks_schema.schema1.id + comment = "Shared schema object for migration test" + data_object_type = "SCHEMA" + } + object { + name = databricks_schema.schema2.id + comment = "Second shared schema object" + data_object_type = "SCHEMA" + } + }`, + }, + ) +} func shareUpdateWithName(name string) string { return fmt.Sprintf(`resource "databricks_share" "myshare" { name = "%s" @@ -358,127 +542,70 @@ func TestUcAccShareReorderObject(t *testing.T) { }) } -func shareTemplate(provider_config string) string { - return fmt.Sprintf(` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-share-config" - %s +func TestUcAccUpdateShareOutsideTerraform(t *testing.T) { + shareName := "" + sharedObjectNameToAdd := "" + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share-outside-terraform" object { name = databricks_schema.schema1.id data_object_type = "SCHEMA" } - } -`, provider_config) -} - -func TestAccShare_ProviderConfig_Invalid(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "invalid" - } - `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), - }) -} - -func TestAccJobCluster_ProviderConfig_Mismatched(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "123" + object { + name = databricks_schema.schema3.id + data_object_type = "SCHEMA" } - `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), - }) -} - -func TestAccJobCluster_ProviderConfig_Required(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { + }`, + Check: func(s *terraform.State) error { + resources := s.RootModule().Resources + share := 
resources["databricks_share.myshare"] + if share == nil { + return fmt.Errorf("expected to find databricks_share.myshare in resources keys: %v", maps.Keys(resources)) } - `), - ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), - }) -} + shareName = share.Primary.Attributes["name"] + assert.NotEmpty(t, shareName) -func TestAccJobCluster_ProviderConfig_EmptyID(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "" + schema := resources["databricks_schema.schema2"] + if schema == nil { + return fmt.Errorf("expected to find databricks_schema.schema2 in resources keys: %v", maps.Keys(resources)) } - `), - ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), - }) -} - -func TestAccJobCluster_ProviderConfig_NotProvided(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(""), - }) -} - -func TestAccJobCluster_ProviderConfig_Match(t *testing.T) { - // acceptance.LoadWorkspaceEnv(t) - // get workspace id here from workspace - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(""), - }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "4220866301720038" - } - `), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), - }, + sharedObjectNameToAdd = schema.Primary.Attributes["id"] + assert.NotEmpty(t, sharedObjectNameToAdd) + return nil }, - }) -} - -func TestAccJobCluster_ProviderConfig_Recreate(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(""), - }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "4220866301720038" - } - `), }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "123" - } - `), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionReplace), - plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionDestroyBeforeCreate), - }, + PreConfig: func() { + w, err := databricks.NewWorkspaceClient(&databricks.Config{}) + require.NoError(t, err) + + // Add object to share outside terraform + _, err = w.Shares.Update(context.Background(), sharing.UpdateShare{ + Name: shareName, + Updates: []sharing.SharedDataObjectUpdate{ + { + Action: sharing.SharedDataObjectUpdateActionAdd, + DataObject: &sharing.SharedDataObject{ + Name: sharedObjectNameToAdd, + DataObjectType: "SCHEMA", + }, + }, + }, + }) + require.NoError(t, err) }, - ExpectError: regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), - }) -} - -func TestAccJobCluster_ProviderConfig_Remove(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(""), - }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config = { - workspace_id = "4220866301720038" + Template: preTestTemplateSchema + ` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-terraform-delta-share-outside-terraform" + object { + name = 
databricks_schema.schema1.id + data_object_type = "SCHEMA" } - `), - }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(""), - ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ - plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), - }, - }, + object { + name = databricks_schema.schema3.id + data_object_type = "SCHEMA" + } + }`, }) } From 428217a42dccd74ea9dd9dcaee45a1c3a9234958 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Sun, 19 Oct 2025 19:22:21 +0530 Subject: [PATCH 09/27] - --- .../products/sharing/data_shares_acc_test.go | 67 +++++++++ .../sharing/resource_share_acc_test.go | 130 ++++++++++++++++++ 2 files changed, 197 insertions(+) diff --git a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go index c6237f0095..83c17e5f95 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go @@ -1,6 +1,8 @@ package sharing_test import ( + "fmt" + "regexp" "strconv" "testing" @@ -88,3 +90,68 @@ func TestUcAccDataSourceShares(t *testing.T) { Check: checkSharesDataSourcePopulated(t), }) } + +func dataSharesTemplate(provider_config string) string { + return fmt.Sprintf(` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-share-config" + object { + name = databricks_schema.schema1.id + data_object_type = "SCHEMA" + } + } + data "databricks_shares" "this" { + depends_on = [databricks_share.myshare] + %s + } +`, provider_config) +} + +func TestAccDataShares_ProviderConfig_Invalid(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataSharesTemplate(` + provider_config = { + workspace_id = "invalid" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), + }) +} + +func TestAccDataShares_ProviderConfig_Mismatched(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataSharesTemplate(` + provider_config = { + workspace_id = "123" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), + }) +} + +func TestAccDataShares_ProviderConfig_Required(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataSharesTemplate(` + provider_config = { + } + `), + ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), + }) +} + +func TestAccDataShares_ProviderConfig_EmptyID(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataSharesTemplate(` + provider_config = { + workspace_id = "" + } + `), + ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), + }) +} + +func TestAccDataShares_ProviderConfig_NotProvided(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataSharesTemplate(""), + }) +} diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 1a5054c585..c67b3935f8 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ 
b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -4,6 +4,7 @@ import ( "context" "fmt" "maps" + "regexp" "testing" "github.com/databricks/databricks-sdk-go" @@ -609,3 +610,132 @@ func TestUcAccUpdateShareOutsideTerraform(t *testing.T) { }`, }) } + +func shareTemplate(provider_config string) string { + return fmt.Sprintf(` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-share-config" + %s + object { + name = databricks_schema.schema1.id + data_object_type = "SCHEMA" + } + } +`, provider_config) +} + +func TestAccShare_ProviderConfig_Invalid(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "invalid" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), + }) +} + +func TestAccShare_ProviderConfig_Mismatched(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "123" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), + }) +} + +func TestAccShare_ProviderConfig_Required(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + } + `), + ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), + }) +} + +func TestAccShare_ProviderConfig_EmptyID(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config = { + workspace_id = "" + } + `), + ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), + }) +} + +func TestAccShare_ProviderConfig_NotProvided(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }) +} + +// func TestAccShare_ProviderConfig_Match(t *testing.T) { +// // acceptance.LoadWorkspaceEnv(t) +// // get workspace id here from workspace +// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(""), +// }, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(` +// provider_config = { +// workspace_id = "4220866301720038" +// } +// `), +// ConfigPlanChecks: resource.ConfigPlanChecks{ +// PreApply: []plancheck.PlanCheck{ +// common.CheckResourceUpdate{Address: "databricks_share.myshare"}, +// common.CheckResourceNoDelete{Address: "databricks_share.myshare"}, +// common.CheckResourceNoCreate{Address: "databricks_share.myshare"}, +// }, +// }, +// }) +// } + +// func TestAccShare_ProviderConfig_Recreate(t *testing.T) { +// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(""), +// }, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(` +// provider_config = { +// workspace_id = "4220866301720038" +// } +// `), +// }, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(` +// provider_config = { +// workspace_id = "123" +// } +// `), +// ConfigPlanChecks: resource.ConfigPlanChecks{ +// PreApply: []plancheck.PlanCheck{ +// common.CheckResourceCreate{Address: "databricks_share.myshare"}, +// common.CheckResourceDelete{Address: "databricks_share.myshare"}, +// }, +// }, +// ExpectError: 
regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), +// }) +// } + +// func TestAccShare_ProviderConfig_Remove(t *testing.T) { +// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(""), +// }, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(` +// provider_config = { +// workspace_id = "4220866301720038" +// } +// `), +// }, acceptance.Step{ +// Template: preTestTemplateSchema + shareTemplate(""), +// ConfigPlanChecks: resource.ConfigPlanChecks{ +// PreApply: []plancheck.PlanCheck{ +// common.CheckResourceUpdate{Address: "databricks_share.myshare"}, +// common.CheckResourceNoDelete{Address: "databricks_share.myshare"}, +// common.CheckResourceNoCreate{Address: "databricks_share.myshare"}, +// }, +// }, +// }) +// } From 80f69893e9822ef2125f19458948536e0f389125 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 13:25:49 +0530 Subject: [PATCH 10/27] - --- .../pluginfw/products/sharing/data_share.go | 2 +- .../pluginfw/products/sharing/data_shares.go | 2 +- .../products/sharing/resource_share.go | 59 ++++---- .../sharing/resource_share_acc_test.go | 139 +++++++++--------- 4 files changed, 97 insertions(+), 105 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/data_share.go b/internal/providers/pluginfw/products/sharing/data_share.go index 248d5c28fa..0de700a608 100644 --- a/internal/providers/pluginfw/products/sharing/data_share.go +++ b/internal/providers/pluginfw/products/sharing/data_share.go @@ -48,7 +48,7 @@ func (s ShareData) ApplySchemaCustomizations(attrs map[string]tfschema.Attribute } func (d *ShareDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShare) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceNameShare) } func (d *ShareDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { diff --git a/internal/providers/pluginfw/products/sharing/data_shares.go b/internal/providers/pluginfw/products/sharing/data_shares.go index 6051bf632b..c91a0dad6d 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares.go +++ b/internal/providers/pluginfw/products/sharing/data_shares.go @@ -56,7 +56,7 @@ type SharesDataSource struct { } func (d *SharesDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(dataSourceNameShares) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceNameShares) } func (d *SharesDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 63e1a14a8c..351d960c16 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -20,7 +20,6 @@ import ( "github.com/hashicorp/terraform-plugin-framework/resource/schema/int64planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) const resourceName = "share" @@ -33,7 +32,7 @@ func ResourceShare() resource.Resource { 
type ShareInfoExtended struct { sharing_tf.ShareInfo_SdkV2 - tfschema.Namespace + tfschema.Namespace_SdkV2 ID types.String `tfsdk:"id"` // Adding ID field to stay compatible with SDKv2 } @@ -146,7 +145,7 @@ type ShareResource struct { } func (r *ShareResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = pluginfwcommon.GetDatabricksStagingName(resourceName) + resp.TypeName = pluginfwcommon.GetDatabricksProductionName(resourceName) } func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { @@ -163,7 +162,7 @@ func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, c.SetRequired("object", "partition", "value", "name") c.SetComputed("id") - c.SetOptional("provider_config") + // c.SetOptional("provider_config") return c }) @@ -202,16 +201,15 @@ func (r *ShareResource) Create(ctx context.Context, req resource.CreateRequest, } var workspaceID string - if !plan.ProviderConfig.IsNull() { - var namespace tfschema.ProviderConfig - resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) + if !plan.ProviderConfig.IsNull() && !plan.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) @@ -280,16 +278,15 @@ func (r *ShareResource) Read(ctx context.Context, req resource.ReadRequest, resp } var workspaceID string - if !existingState.ProviderConfig.IsNull() { - var namespace tfschema.ProviderConfig - resp.Diagnostics.Append(existingState.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) + if !existingState.ProviderConfig.IsNull() && !existingState.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(existingState.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) @@ -351,16 +348,15 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, getShareRequest.IncludeSharedData = true var workspaceID string - if !plan.ProviderConfig.IsNull() { - var namespace tfschema.ProviderConfig - resp.Diagnostics.Append(plan.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) + if !plan.ProviderConfig.IsNull() && !plan.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(plan.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) 
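+		// provider_config is modeled as a list block in the SDKv2-compatible schema;
+		// when an element is configured, its workspace_id is read below and used to
+		// resolve the workspace client for this request.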
if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) @@ -458,16 +454,15 @@ func (r *ShareResource) Delete(ctx context.Context, req resource.DeleteRequest, } var workspaceID string - if !state.ProviderConfig.IsNull() { - var namespace tfschema.ProviderConfig - resp.Diagnostics.Append(state.ProviderConfig.As(ctx, &namespace, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) + if !state.ProviderConfig.IsNull() && !state.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(state.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index c67b3935f8..e57f4c4723 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -12,6 +12,8 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/acceptance" "github.com/databricks/terraform-provider-databricks/internal/providers" "github.com/hashicorp/terraform-plugin-go/tfprotov6" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/hashicorp/terraform-plugin-testing/terraform" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" @@ -627,7 +629,7 @@ func shareTemplate(provider_config string) string { func TestAccShare_ProviderConfig_Invalid(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` - provider_config = { + provider_config { workspace_id = "invalid" } `), @@ -638,7 +640,7 @@ func TestAccShare_ProviderConfig_Invalid(t *testing.T) { func TestAccShare_ProviderConfig_Mismatched(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` - provider_config = { + provider_config { workspace_id = "123" } `), @@ -649,7 +651,7 @@ func TestAccShare_ProviderConfig_Mismatched(t *testing.T) { func TestAccShare_ProviderConfig_Required(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` - provider_config = { + provider_config { } `), ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), @@ -659,11 +661,11 @@ func TestAccShare_ProviderConfig_Required(t *testing.T) { func TestAccShare_ProviderConfig_EmptyID(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` - provider_config = { + provider_config { workspace_id = "" } `), - ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), + ExpectError: regexp.MustCompile(`Attribute provider_config\[0\]\.workspace_id string length must be at 
least 1`), }) } @@ -673,69 +675,64 @@ func TestAccShare_ProviderConfig_NotProvided(t *testing.T) { }) } -// func TestAccShare_ProviderConfig_Match(t *testing.T) { -// // acceptance.LoadWorkspaceEnv(t) -// // get workspace id here from workspace -// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(""), -// }, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(` -// provider_config = { -// workspace_id = "4220866301720038" -// } -// `), -// ConfigPlanChecks: resource.ConfigPlanChecks{ -// PreApply: []plancheck.PlanCheck{ -// common.CheckResourceUpdate{Address: "databricks_share.myshare"}, -// common.CheckResourceNoDelete{Address: "databricks_share.myshare"}, -// common.CheckResourceNoCreate{Address: "databricks_share.myshare"}, -// }, -// }, -// }) -// } - -// func TestAccShare_ProviderConfig_Recreate(t *testing.T) { -// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(""), -// }, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(` -// provider_config = { -// workspace_id = "4220866301720038" -// } -// `), -// }, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(` -// provider_config = { -// workspace_id = "123" -// } -// `), -// ConfigPlanChecks: resource.ConfigPlanChecks{ -// PreApply: []plancheck.PlanCheck{ -// common.CheckResourceCreate{Address: "databricks_share.myshare"}, -// common.CheckResourceDelete{Address: "databricks_share.myshare"}, -// }, -// }, -// ExpectError: regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), -// }) -// } - -// func TestAccShare_ProviderConfig_Remove(t *testing.T) { -// acceptance.UnityWorkspaceLevel(t, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(""), -// }, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(` -// provider_config = { -// workspace_id = "4220866301720038" -// } -// `), -// }, acceptance.Step{ -// Template: preTestTemplateSchema + shareTemplate(""), -// ConfigPlanChecks: resource.ConfigPlanChecks{ -// PreApply: []plancheck.PlanCheck{ -// common.CheckResourceUpdate{Address: "databricks_share.myshare"}, -// common.CheckResourceNoDelete{Address: "databricks_share.myshare"}, -// common.CheckResourceNoCreate{Address: "databricks_share.myshare"}, -// }, -// }, -// }) -// } +func TestAccShare_ProviderConfig_Match(t *testing.T) { + // acceptance.LoadWorkspaceEnv(t) + // get workspace id here from workspace + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "575821473882772" + } + `), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), + }, + }, + }) +} + +func TestAccShare_ProviderConfig_Recreate(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "575821473882772" + } + `), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "123" + } + `), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", 
plancheck.ResourceActionDestroyBeforeCreate), + }, + }, + ExpectError: regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), + }) +} + +func TestAccShare_ProviderConfig_Remove(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "575821473882772" + } + `), + }, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(""), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), + }, + }, + }) +} From ab655f795f80e173ab5977313b5a669f2176197e Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 13:30:45 +0530 Subject: [PATCH 11/27] - --- .../products/sharing/resource_share.go | 37 ++++++++++++++----- 1 file changed, 27 insertions(+), 10 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 351d960c16..9584eb8773 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -501,17 +501,34 @@ func (effectiveFieldsActionRead) objectLevel(ctx context.Context, state *sharing state.SyncFieldsDuringRead(ctx, plan) } -func (r *ShareResource) syncEffectiveFields(ctx context.Context, plan, state ShareInfoExtended, mode effectiveFieldsAction) (ShareInfoExtended, diag.Diagnostics) { +// syncEffectiveFields syncs the effective fields between existingState and newState +// and returns the newState +// +// existingState: infrastructure values that are recorded in the existing terraform state. +// newState: latest infrastructure values that are returned by the CRUD API calls. +// +// HCL config is compared with this newState to determine what changes are to be made +// to the infrastructure and then the newState values are recorded in the terraform state. +// Hence we ignore the values in existingState which are not present in newState. +func (r *ShareResource) syncEffectiveFields(ctx context.Context, existingState, newState ShareInfoExtended, mode effectiveFieldsAction) (ShareInfoExtended, diag.Diagnostics) { var d diag.Diagnostics - mode.resourceLevel(ctx, &state, plan.ShareInfo_SdkV2) - planObjects, _ := plan.GetObjects(ctx) - stateObjects, _ := state.GetObjects(ctx) + mode.resourceLevel(ctx, &newState, existingState.ShareInfo_SdkV2) + existingStateObjects, _ := existingState.GetObjects(ctx) + newStateObjects, _ := newState.GetObjects(ctx) finalObjects := []sharing_tf.SharedDataObject_SdkV2{} - for i := range stateObjects { - mode.objectLevel(ctx, &stateObjects[i], planObjects[i]) - finalObjects = append(finalObjects, stateObjects[i]) + for i := range newStateObjects { + // For each object in the new state, we check if it exists in the existing state + // and if it does, we sync the effective fields. + // If it does not exist, we keep the new state object as is. 
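+		// The match is made on the object Name field.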
+		for j := range existingStateObjects {
+			if newStateObjects[i].Name == existingStateObjects[j].Name {
+				mode.objectLevel(ctx, &newStateObjects[i], existingStateObjects[j])
+				break
+			}
+		}
+		finalObjects = append(finalObjects, newStateObjects[i])
 	}
-	state.SetObjects(ctx, finalObjects)
-	state.ProviderConfig = plan.ProviderConfig // Preserve provider_config from plan
-	return state, d
+	newState.SetObjects(ctx, finalObjects)
+	newState.ProviderConfig = existingState.ProviderConfig // Preserve provider_config from existing state
+	return newState, d
 }

From ffbc9a1751946c1ca4abd8ec5d335ecc297bc5cd Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi
Date: Mon, 20 Oct 2025 13:31:55 +0530
Subject: [PATCH 12/27] -

---
 NEXT_CHANGELOG.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md
index c9fd7c68e7..89e8a67d0d 100644
--- a/NEXT_CHANGELOG.md
+++ b/NEXT_CHANGELOG.md
@@ -15,4 +15,4 @@
 ### Exporter
 
 ### Internal Changes
-* Add provider_config support for plugin framework ([#5104](https://github.com/databricks/terraform-provider-databricks/pull/5104))
+* Add `provider_config` support for SDKv2-compatible plugin framework resources and data sources ([#5115](https://github.com/databricks/terraform-provider-databricks/pull/5115))

From 89f5f46e0d28ea9d16a93fe0899da02a8923e343 Mon Sep 17 00:00:00 2001
From: Tanmay Rustagi
Date: Mon, 20 Oct 2025 13:56:26 +0530
Subject: [PATCH 13/27] -

---
 .../pluginfw/products/sharing/data_share.go   | 17 ++++-----
 .../products/sharing/data_share_acc_test.go   | 36 +++++++++++++++++++
 .../pluginfw/products/sharing/data_shares.go  | 24 ++++++-------
 .../products/sharing/data_shares_acc_test.go  |  8 ++---
 4 files changed, 59 insertions(+), 26 deletions(-)
 create mode 100644 internal/providers/pluginfw/products/sharing/data_share_acc_test.go

diff --git a/internal/providers/pluginfw/products/sharing/data_share.go b/internal/providers/pluginfw/products/sharing/data_share.go
index 248d5c28fa..4333402aa6 100644
--- a/internal/providers/pluginfw/products/sharing/data_share.go
+++ b/internal/providers/pluginfw/products/sharing/data_share.go
@@ -14,8 +14,6 @@ import (
 	"github.com/databricks/terraform-provider-databricks/internal/service/sharing_tf"
 	"github.com/hashicorp/terraform-plugin-framework/datasource"
 	"github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-	"github.com/hashicorp/terraform-plugin-framework/types"
-	"github.com/hashicorp/terraform-plugin-framework/types/basetypes"
 )
 
 const dataSourceNameShare = "share"
@@ -32,7 +30,7 @@ type ShareDataSource struct {
 
 type ShareData struct {
 	sharing_tf.ShareInfo
-	ProviderConfigData types.Object `tfsdk:"provider_config"`
+	tfschema.Namespace_SdkV2
 }
 
 func (s ShareData) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type {
@@ -75,16 +73,15 @@ func (d *ShareDataSource) Read(ctx context.Context, req datasource.ReadRequest,
 	}
 
 	var workspaceID string
-	if !config.ProviderConfigData.IsNull() {
-		var namespace tfschema.ProviderConfigData
-		resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{
-			UnhandledNullAsEmpty:    true,
-			UnhandledUnknownAsEmpty: true,
-		})...)
+	if !config.ProviderConfig.IsNull() && !config.ProviderConfig.IsUnknown() {
+		var namespaceList []tfschema.ProviderConfig
+		resp.Diagnostics.Append(config.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...)
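+		// Same pattern as the share resource: the configured provider_config element's
+		// workspace_id (if any) selects the workspace client used for this read.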
if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, diags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(diags...) diff --git a/internal/providers/pluginfw/products/sharing/data_share_acc_test.go b/internal/providers/pluginfw/products/sharing/data_share_acc_test.go new file mode 100644 index 0000000000..831b3755b3 --- /dev/null +++ b/internal/providers/pluginfw/products/sharing/data_share_acc_test.go @@ -0,0 +1,36 @@ +package sharing_test + +import ( + "fmt" + "regexp" + "testing" + + "github.com/databricks/terraform-provider-databricks/internal/acceptance" +) + +func dataShareTemplate(provider_config string) string { + return fmt.Sprintf(` + resource "databricks_share" "myshare" { + name = "{var.STICKY_RANDOM}-share-config" + object { + name = databricks_schema.schema1.id + data_object_type = "SCHEMA" + } + } + data "databricks_share" "this" { + name = databricks_share.myshare.name + %s + } +`, provider_config) +} + +func TestAccDataShare_ProviderConfig_Invalid(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + dataShareTemplate(` + provider_config { + workspace_id = "invalid" + } + `), + ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), + }) +} diff --git a/internal/providers/pluginfw/products/sharing/data_shares.go b/internal/providers/pluginfw/products/sharing/data_shares.go index c91a0dad6d..35def4dcb6 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares.go +++ b/internal/providers/pluginfw/products/sharing/data_shares.go @@ -13,14 +13,13 @@ import ( "github.com/hashicorp/terraform-plugin-framework/datasource" "github.com/hashicorp/terraform-plugin-framework/datasource/schema" "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" ) const dataSourceNameShares = "shares" type SharesList struct { - Shares types.List `tfsdk:"shares"` - ProviderConfigData types.Object `tfsdk:"provider_config"` + Shares types.List `tfsdk:"shares"` + tfschema.Namespace_SdkV2 } func (s SharesList) GetComplexFieldTypes(context.Context) map[string]reflect.Type { @@ -83,16 +82,15 @@ func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, } var workspaceID string - if !config.ProviderConfigData.IsNull() { - var namespace tfschema.ProviderConfigData - resp.Diagnostics.Append(config.ProviderConfigData.As(ctx, &namespace, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) + if !config.ProviderConfig.IsNull() && !config.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(config.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) 
if resp.Diagnostics.HasError() { return } - workspaceID = namespace.WorkspaceID.ValueString() + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } } w, clientDiags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -113,8 +111,10 @@ func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, } newState := SharesList{ - Shares: types.ListValueMust(types.StringType, shareNames), - ProviderConfigData: config.ProviderConfigData, + Shares: types.ListValueMust(types.StringType, shareNames), + Namespace_SdkV2: tfschema.Namespace_SdkV2{ + ProviderConfig: config.ProviderConfig, + }, } resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) } diff --git a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go index 83c17e5f95..2953fa9c05 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go @@ -110,7 +110,7 @@ func dataSharesTemplate(provider_config string) string { func TestAccDataShares_ProviderConfig_Invalid(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config = { + provider_config { workspace_id = "invalid" } `), @@ -121,7 +121,7 @@ func TestAccDataShares_ProviderConfig_Invalid(t *testing.T) { func TestAccDataShares_ProviderConfig_Mismatched(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config = { + provider_config { workspace_id = "123" } `), @@ -132,7 +132,7 @@ func TestAccDataShares_ProviderConfig_Mismatched(t *testing.T) { func TestAccDataShares_ProviderConfig_Required(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config = { + provider_config { } `), ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), @@ -142,7 +142,7 @@ func TestAccDataShares_ProviderConfig_Required(t *testing.T) { func TestAccDataShares_ProviderConfig_EmptyID(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config = { + provider_config { workspace_id = "" } `), From 800713ec1f7543f7a2bee74a29300ab1f6b3b096 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 14:08:23 +0530 Subject: [PATCH 14/27] - --- .../pluginfw/products/sharing/data_share.go | 39 ++------------ .../products/sharing/data_share_acc_test.go | 36 ------------- .../pluginfw/products/sharing/data_shares.go | 54 +++++-------------- 3 files changed, 18 insertions(+), 111 deletions(-) delete mode 100644 internal/providers/pluginfw/products/sharing/data_share_acc_test.go diff --git a/internal/providers/pluginfw/products/sharing/data_share.go b/internal/providers/pluginfw/products/sharing/data_share.go index 4333402aa6..7395102aaf 100644 --- a/internal/providers/pluginfw/products/sharing/data_share.go +++ b/internal/providers/pluginfw/products/sharing/data_share.go @@ -2,7 +2,6 @@ package sharing import ( "context" - "reflect" "github.com/databricks/databricks-sdk-go/apierr" "github.com/databricks/databricks-sdk-go/service/sharing" @@ -28,29 +27,12 @@ type ShareDataSource struct { Client *common.DatabricksClient } -type ShareData struct { - sharing_tf.ShareInfo - tfschema.Namespace_SdkV2 -} - -func (s ShareData) 
GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - types := s.ShareInfo.GetComplexFieldTypes(ctx) - types["provider_config"] = reflect.TypeOf(tfschema.ProviderConfigData{}) - return types -} - -func (s ShareData) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - s.ShareInfo.ApplySchemaCustomizations(attrs) - attrs["provider_config"] = attrs["provider_config"].SetOptional() - return attrs -} - func (d *ShareDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { resp.TypeName = pluginfwcommon.GetDatabricksProductionName(dataSourceNameShare) } func (d *ShareDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, ShareData{}, nil) + attrs, blocks := tfschema.DataSourceStructToSchemaMap(ctx, sharing_tf.ShareInfo{}, nil) resp.Schema = schema.Schema{ Attributes: attrs, Blocks: blocks, @@ -65,25 +47,14 @@ func (d *ShareDataSource) Configure(_ context.Context, req datasource.ConfigureR func (d *ShareDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShare) - - var config ShareData - resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) + w, diags := d.Client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } - var workspaceID string - if !config.ProviderConfig.IsNull() && !config.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(config.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } - } - w, diags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + var config sharing_tf.ShareInfo + diags = req.Config.Get(ctx, &config) resp.Diagnostics.Append(diags...) 
if resp.Diagnostics.HasError() { return diff --git a/internal/providers/pluginfw/products/sharing/data_share_acc_test.go b/internal/providers/pluginfw/products/sharing/data_share_acc_test.go deleted file mode 100644 index 831b3755b3..0000000000 --- a/internal/providers/pluginfw/products/sharing/data_share_acc_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package sharing_test - -import ( - "fmt" - "regexp" - "testing" - - "github.com/databricks/terraform-provider-databricks/internal/acceptance" -) - -func dataShareTemplate(provider_config string) string { - return fmt.Sprintf(` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-share-config" - object { - name = databricks_schema.schema1.id - data_object_type = "SCHEMA" - } - } - data "databricks_share" "this" { - name = databricks_share.myshare.name - %s - } -`, provider_config) -} - -func TestAccDataShare_ProviderConfig_Invalid(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + dataShareTemplate(` - provider_config { - workspace_id = "invalid" - } - `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), - }) -} diff --git a/internal/providers/pluginfw/products/sharing/data_shares.go b/internal/providers/pluginfw/products/sharing/data_shares.go index 35def4dcb6..01fa3b2157 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares.go +++ b/internal/providers/pluginfw/products/sharing/data_shares.go @@ -19,31 +19,28 @@ const dataSourceNameShares = "shares" type SharesList struct { Shares types.List `tfsdk:"shares"` - tfschema.Namespace_SdkV2 } -func (s SharesList) GetComplexFieldTypes(context.Context) map[string]reflect.Type { +func (SharesList) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { + attrs["shares"] = attrs["shares"].SetComputed().SetOptional() + + return attrs +} + +func (SharesList) GetComplexFieldTypes(context.Context) map[string]reflect.Type { return map[string]reflect.Type{ - "shares": reflect.TypeOf(types.String{}), - "provider_config": reflect.TypeOf(tfschema.ProviderConfigData{}), + "shares": reflect.TypeOf(types.String{}), } } -func (s SharesList) ToObjectType(ctx context.Context) types.ObjectType { +func (SharesList) ToObjectType(ctx context.Context) types.ObjectType { return types.ObjectType{ AttrTypes: map[string]attr.Type{ - "shares": types.ListType{ElemType: types.StringType}, - "provider_config": tfschema.ProviderConfigData{}.Type(ctx), + "shares": types.ListType{ElemType: types.StringType}, }, } } -func (s SharesList) ApplySchemaCustomizations(attrs map[string]tfschema.AttributeBuilder) map[string]tfschema.AttributeBuilder { - attrs["shares"] = attrs["shares"].SetComputed().SetOptional() - attrs["provider_config"] = attrs["provider_config"].SetOptional() - return attrs -} - func DataSourceShares() datasource.DataSource { return &SharesDataSource{} } @@ -74,27 +71,8 @@ func (d *SharesDataSource) Configure(_ context.Context, req datasource.Configure func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInDataSourceContext(ctx, dataSourceNameShares) - - var config SharesList - resp.Diagnostics.Append(req.Config.Get(ctx, &config)...) 
- if resp.Diagnostics.HasError() { - return - } - - var workspaceID string - if !config.ProviderConfig.IsNull() && !config.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(config.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } - } - w, clientDiags := d.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) - - resp.Diagnostics.Append(clientDiags...) + w, diags := d.Client.GetWorkspaceClient() + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } @@ -110,11 +88,5 @@ func (d *SharesDataSource) Read(ctx context.Context, req datasource.ReadRequest, shareNames[i] = types.StringValue(share.Name) } - newState := SharesList{ - Shares: types.ListValueMust(types.StringType, shareNames), - Namespace_SdkV2: tfschema.Namespace_SdkV2{ - ProviderConfig: config.ProviderConfig, - }, - } - resp.Diagnostics.Append(resp.State.Set(ctx, newState)...) + resp.Diagnostics.Append(resp.State.Set(ctx, SharesList{Shares: types.ListValueMust(types.StringType, shareNames)})...) } From 45d2bbaba6af0d7a2a471b5bb0971e9dea12cea9 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 14:09:07 +0530 Subject: [PATCH 15/27] - --- .../products/sharing/data_shares_acc_test.go | 67 ------------------- 1 file changed, 67 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go index 2953fa9c05..c6237f0095 100644 --- a/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/data_shares_acc_test.go @@ -1,8 +1,6 @@ package sharing_test import ( - "fmt" - "regexp" "strconv" "testing" @@ -90,68 +88,3 @@ func TestUcAccDataSourceShares(t *testing.T) { Check: checkSharesDataSourcePopulated(t), }) } - -func dataSharesTemplate(provider_config string) string { - return fmt.Sprintf(` - resource "databricks_share" "myshare" { - name = "{var.STICKY_RANDOM}-share-config" - object { - name = databricks_schema.schema1.id - data_object_type = "SCHEMA" - } - } - data "databricks_shares" "this" { - depends_on = [databricks_share.myshare] - %s - } -`, provider_config) -} - -func TestAccDataShares_ProviderConfig_Invalid(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config { - workspace_id = "invalid" - } - `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), - }) -} - -func TestAccDataShares_ProviderConfig_Mismatched(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config { - workspace_id = "123" - } - `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), - }) -} - -func TestAccDataShares_ProviderConfig_Required(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + dataSharesTemplate(` - provider_config { - } - `), - ExpectError: regexp.MustCompile(`(?s).*workspace_id.*is required`), - }) -} - -func TestAccDataShares_ProviderConfig_EmptyID(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: 
preTestTemplateSchema + dataSharesTemplate(` - provider_config { - workspace_id = "" - } - `), - ExpectError: regexp.MustCompile(`Attribute provider_config\.workspace_id string length must be at least 1`), - }) -} - -func TestAccDataShares_ProviderConfig_NotProvided(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + dataSharesTemplate(""), - }) -} From e4a610e4ca2049807fa3d502d501affbcca83efb Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 14:26:42 +0530 Subject: [PATCH 16/27] - --- .../products/library/resource_library.go | 76 +++++++++-- .../resource_quality_monitor.go | 126 +++++++++++++----- 2 files changed, 154 insertions(+), 48 deletions(-) diff --git a/internal/providers/pluginfw/products/library/resource_library.go b/internal/providers/pluginfw/products/library/resource_library.go index ea2aefecd8..0be9f8a933 100644 --- a/internal/providers/pluginfw/products/library/resource_library.go +++ b/internal/providers/pluginfw/products/library/resource_library.go @@ -4,6 +4,7 @@ import ( "context" "errors" "fmt" + "reflect" "time" "github.com/databricks/databricks-sdk-go/apierr" @@ -76,6 +77,13 @@ type LibraryExtended struct { compute_tf.Library_SdkV2 ClusterId types.String `tfsdk:"cluster_id"` ID types.String `tfsdk:"id"` // Adding ID field to stay compatible with SDKv2 + tfschema.Namespace_SdkV2 +} + +func (l LibraryExtended) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { + attrs := l.Library_SdkV2.GetComplexFieldTypes(ctx) + attrs["provider_config"] = reflect.TypeOf(tfschema.ProviderConfig{}) + return attrs } type LibraryResource struct { @@ -105,6 +113,7 @@ func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest } c.SetRequired("cluster_id") c.SetOptional("id") + c.SetOptional("provider_config") c.SetComputed("id") c.SetDeprecated(clusters.EggDeprecationWarning, "egg") return c @@ -124,13 +133,26 @@ func (r *LibraryResource) Configure(ctx context.Context, req resource.ConfigureR func (r *LibraryResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var libraryTfSDK LibraryExtended + resp.Diagnostics.Append(req.Plan.Get(ctx, &libraryTfSDK)...) if resp.Diagnostics.HasError() { return } - var libraryTfSDK LibraryExtended - resp.Diagnostics.Append(req.Plan.Get(ctx, &libraryTfSDK)...) + + var workspaceID string + if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } @@ -171,21 +193,36 @@ func (r *LibraryResource) Create(ctx context.Context, req resource.CreateRequest } installedLib.ID = types.StringValue(libGoSDK.String()) + installedLib.ProviderConfig = libraryTfSDK.ProviderConfig resp.Diagnostics.Append(resp.State.Set(ctx, installedLib)...) 
} func (r *LibraryResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var libraryTfSDK LibraryExtended + resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...) if resp.Diagnostics.HasError() { return } - var libraryTfSDK LibraryExtended - resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...) + + var workspaceID string + if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + var libGoSDK compute.Library resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, libraryTfSDK, &libGoSDK)...) if resp.Diagnostics.HasError() { @@ -209,6 +246,7 @@ func (r *LibraryResource) Read(ctx context.Context, req resource.ReadRequest, re return } + installedLib.ProviderConfig = libraryTfSDK.ProviderConfig resp.Diagnostics.Append(resp.State.Set(ctx, installedLib)...) } @@ -218,16 +256,30 @@ func (r *LibraryResource) Update(ctx context.Context, req resource.UpdateRequest func (r *LibraryResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var libraryTfSDK LibraryExtended + resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...) if resp.Diagnostics.HasError() { return } - var libraryTfSDK LibraryExtended - resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...) + + var workspaceID string + if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + clusterID := libraryTfSDK.ClusterId.ValueString() var libGoSDK compute.Library resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, libraryTfSDK, &libGoSDK)...) 
diff --git a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go index 99849c4439..25a74e1a70 100644 --- a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go +++ b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go @@ -62,12 +62,15 @@ type MonitorInfoExtended struct { WarehouseId types.String `tfsdk:"warehouse_id"` SkipBuiltinDashboard types.Bool `tfsdk:"skip_builtin_dashboard"` ID types.String `tfsdk:"id"` // Adding ID field to stay compatible with SDKv2 + tfschema.Namespace_SdkV2 } var _ pluginfwcommon.ComplexFieldTypeProvider = MonitorInfoExtended{} func (m MonitorInfoExtended) GetComplexFieldTypes(ctx context.Context) map[string]reflect.Type { - return m.MonitorInfo_SdkV2.GetComplexFieldTypes(ctx) + attrs := m.MonitorInfo_SdkV2.GetComplexFieldTypes(ctx) + attrs["provider_config"] = reflect.TypeOf(tfschema.ProviderConfig{}) + return attrs } type QualityMonitorResource struct { @@ -92,6 +95,7 @@ func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.Schema c.SetOptional("skip_builtin_dashboard") c.SetComputed("id") c.SetOptional("id") + c.SetOptional("provider_config") return c }) resp.Schema = schema.Schema{ @@ -113,13 +117,26 @@ func (d *QualityMonitorResource) ImportState(ctx context.Context, req resource.I func (r *QualityMonitorResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + var monitorInfoTfSDK MonitorInfoExtended + resp.Diagnostics.Append(req.Plan.Get(ctx, &monitorInfoTfSDK)...) if resp.Diagnostics.HasError() { return } - var monitorInfoTfSDK MonitorInfoExtended - resp.Diagnostics.Append(req.Plan.Get(ctx, &monitorInfoTfSDK)...) + + var workspaceID string + if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } @@ -150,25 +167,40 @@ func (r *QualityMonitorResource) Create(ctx context.Context, req resource.Create // We need it to fill additional fields as they are not returned by the API newMonitorInfoTfSDK.WarehouseId = monitorInfoTfSDK.WarehouseId newMonitorInfoTfSDK.SkipBuiltinDashboard = monitorInfoTfSDK.SkipBuiltinDashboard + newMonitorInfoTfSDK.ProviderConfig = monitorInfoTfSDK.ProviderConfig resp.Diagnostics.Append(resp.State.Set(ctx, newMonitorInfoTfSDK)...) } func (r *QualityMonitorResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + + var monitorInfoTfSDK MonitorInfoExtended + resp.Diagnostics.Append(req.State.Get(ctx, &monitorInfoTfSDK)...) 
if resp.Diagnostics.HasError() { return } - var getMonitor catalog_tf.GetQualityMonitorRequest - resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("table_name"), &getMonitor.TableName)...) + var workspaceID string + if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + endpoint, err := w.QualityMonitors.Get(ctx, catalog.GetQualityMonitorRequest{ - TableName: getMonitor.TableName.ValueString(), + TableName: monitorInfoTfSDK.TableName.ValueString(), }) if err != nil { if apierr.IsMissing(err) { @@ -178,38 +210,26 @@ func (r *QualityMonitorResource) Read(ctx context.Context, req resource.ReadRequ resp.Diagnostics.AddError("failed to get monitor", err.Error()) return } - var monitorInfoTfSDK MonitorInfoExtended - resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, endpoint, &monitorInfoTfSDK)...) + var newMonitorInfoTfSDK MonitorInfoExtended + resp.Diagnostics.Append(converters.GoSdkToTfSdkStruct(ctx, endpoint, &newMonitorInfoTfSDK)...) if resp.Diagnostics.HasError() { return } - monitorInfoTfSDK.ID = monitorInfoTfSDK.TableName - // We need it to fill additional fields as they are not returned by the API - var origWarehouseId types.String - var origSkipBuiltinDashboard types.Bool - resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("warehouse_id"), &origWarehouseId)...) - resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("skip_builtin_dashboard"), &origSkipBuiltinDashboard)...) - if resp.Diagnostics.HasError() { - return + newMonitorInfoTfSDK.ID = monitorInfoTfSDK.TableName + if monitorInfoTfSDK.WarehouseId.ValueString() != "" { + newMonitorInfoTfSDK.WarehouseId = monitorInfoTfSDK.WarehouseId } - if origWarehouseId.ValueString() != "" { - monitorInfoTfSDK.WarehouseId = origWarehouseId - } - if origSkipBuiltinDashboard.ValueBool() { - monitorInfoTfSDK.SkipBuiltinDashboard = origSkipBuiltinDashboard + if monitorInfoTfSDK.SkipBuiltinDashboard.ValueBool() { + newMonitorInfoTfSDK.SkipBuiltinDashboard = monitorInfoTfSDK.SkipBuiltinDashboard } - resp.Diagnostics.Append(resp.State.Set(ctx, monitorInfoTfSDK)...) + newMonitorInfoTfSDK.ProviderConfig = monitorInfoTfSDK.ProviderConfig + resp.Diagnostics.Append(resp.State.Set(ctx, newMonitorInfoTfSDK)...) } func (r *QualityMonitorResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) - if resp.Diagnostics.HasError() { - return - } var monitorInfoTfSDK MonitorInfoExtended resp.Diagnostics.Append(req.Plan.Get(ctx, &monitorInfoTfSDK)...) 
@@ -230,6 +250,25 @@ func (r *QualityMonitorResource) Update(ctx context.Context, req resource.Update if updateMonitorGoSDK.Schedule != nil { updateMonitorGoSDK.Schedule.PauseStatus = "" } + + var workspaceID string + if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return + } + monitor, err := w.QualityMonitors.Update(ctx, updateMonitorGoSDK) if err != nil { resp.Diagnostics.AddError("failed to update monitor", err.Error()) @@ -252,24 +291,39 @@ func (r *QualityMonitorResource) Update(ctx context.Context, req resource.Update return } + newMonitorInfoTfSDK.ProviderConfig = monitorInfoTfSDK.ProviderConfig resp.Diagnostics.Append(resp.State.Set(ctx, newMonitorInfoTfSDK)...) } func (r *QualityMonitorResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { ctx = pluginfwcontext.SetUserAgentInResourceContext(ctx, resourceName) - w, diags := r.Client.GetWorkspaceClient() - resp.Diagnostics.Append(diags...) + + var monitorInfoTfSDK MonitorInfoExtended + resp.Diagnostics.Append(req.State.Get(ctx, &monitorInfoTfSDK)...) if resp.Diagnostics.HasError() { return } - var deleteRequest catalog_tf.DeleteQualityMonitorRequest - resp.Diagnostics.Append(req.State.GetAttribute(ctx, path.Root("table_name"), &deleteRequest.TableName)...) + var workspaceID string + if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { + var namespaceList []tfschema.ProviderConfig + resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) + if resp.Diagnostics.HasError() { + return + } + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + } + + w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) + resp.Diagnostics.Append(diags...) if resp.Diagnostics.HasError() { return } + _, err := w.QualityMonitors.Delete(ctx, catalog.DeleteQualityMonitorRequest{ - TableName: deleteRequest.TableName.ValueString(), + TableName: monitorInfoTfSDK.TableName.ValueString(), }) if err != nil && !apierr.IsMissing(err) { resp.Diagnostics.AddError("failed to delete monitor", err.Error()) From f3debe9fce2e1844eb64fcd74374361c79f4ee28 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 14:38:36 +0530 Subject: [PATCH 17/27] docs --- docs/resources/library.md | 5 +++++ docs/resources/quality_monitor.md | 5 +++-- docs/resources/share.md | 1 + 3 files changed, 9 insertions(+), 2 deletions(-) diff --git a/docs/resources/library.md b/docs/resources/library.md index d13b23f159..07ae241daf 100644 --- a/docs/resources/library.md +++ b/docs/resources/library.md @@ -127,6 +127,11 @@ resource "databricks_library" "rkeops" { } ``` +## Argument Reference + +The following arguments are supported: +* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. + ## Import !> Importing this resource is not currently supported. 
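As context for the `provider_config` argument added to the library documentation above, here is a minimal configuration sketch of how the block would appear on `databricks_library`; it is not taken from the patch, and the cluster reference, package name, and workspace ID are placeholders.

```hcl
# Illustrative sketch only: cluster reference, package, and workspace ID are placeholders.
resource "databricks_library" "example" {
  cluster_id = databricks_cluster.this.id

  pypi {
    package = "fbprophet"
  }

  # Lets an account-level provider target the workspace that owns the cluster.
  provider_config {
    workspace_id = "1234567890123456"
  }
}
```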
diff --git a/docs/resources/quality_monitor.md b/docs/resources/quality_monitor.md index 7ddd8ac866..1e107db5be 100644 --- a/docs/resources/quality_monitor.md +++ b/docs/resources/quality_monitor.md @@ -3,7 +3,7 @@ subcategory: "Unity Catalog" --- # databricks_quality_monitor Resource -This resource allows you to manage [Lakehouse Monitors](https://docs.databricks.com/en/lakehouse-monitoring/index.html) in Databricks. +This resource allows you to manage [Lakehouse Monitors](https://docs.databricks.com/en/lakehouse-monitoring/index.html) in Databricks. -> This resource can only be used with a workspace-level provider! @@ -120,6 +120,7 @@ table. * `skip_builtin_dashboard` - Whether to skip creating a default dashboard summarizing data quality metrics. (Can't be updated after creation). * `slicing_exprs` - List of column expressions to slice data with for targeted analysis. The data is grouped by each expression independently, resulting in a separate slice for each predicate and its complements. For high-cardinality columns, only the top 100 unique values by frequency will generate slices. * `warehouse_id` - Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. (Can't be updated after creation) +* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. ## Attribute Reference @@ -129,7 +130,7 @@ In addition to all arguments above, the following attributes are exported: * `monitor_version` - The version of the monitor config (e.g. 1,2,3). If negative, the monitor may be corrupted * `drift_metrics_table_name` - The full name of the drift metrics table. Format: __catalog_name__.__schema_name__.__table_name__. * `profile_metrics_table_name` - The full name of the profile metrics table. Format: __catalog_name__.__schema_name__.__table_name__. -* `status` - Status of the Monitor +* `status` - Status of the Monitor * `dashboard_id` - The ID of the generated dashboard. ## Related Resources diff --git a/docs/resources/share.md b/docs/resources/share.md index 45a9d5f37b..805dac0cc2 100644 --- a/docs/resources/share.md +++ b/docs/resources/share.md @@ -95,6 +95,7 @@ The following arguments are required: * `cdf_enabled` - (Optional) Whether to enable Change Data Feed (cdf) on the shared object. When this field is set, field `history_data_sharing_status` can not be set. * `start_version` - (Optional) The start version associated with the object for cdf. This allows data providers to control the lowest object version that is accessible by clients. * `history_data_sharing_status` - (Optional) Whether to enable history sharing, one of: `ENABLED`, `DISABLED`. When a table has history sharing enabled, recipients can query table data by version, starting from the current table version. If not specified, clients can only query starting from the version of the object at the time it was added to the share. *NOTE*: The start_version should be less than or equal the current version of the object. When this field is set, field `cdf_enabled` can not be set. +* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. To share only part of a table when you add the table to a share, you can provide partition specifications. This is specified by a number of `partition` blocks. Each entry in `partition` block takes a list of `value` blocks. The field is documented below. 
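To make the `provider_config` argument documented in this patch concrete, a hedged sketch modelled on the acceptance-test template used elsewhere in this series; the share name, schema reference, and workspace ID are placeholders rather than values from the patch.

```hcl
# Illustrative sketch only: schema reference and workspace ID are placeholders.
resource "databricks_share" "myshare" {
  name = "my-share"

  object {
    name             = databricks_schema.schema1.id
    data_object_type = "SCHEMA"
  }

  # Identifies the workspace this share should be managed in.
  provider_config {
    workspace_id = "1234567890123456"
  }
}
```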
From 96c477ad663dc3d2b54fb5b542b3d601765c86d5 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 14:42:21 +0530 Subject: [PATCH 18/27] - --- internal/providers/pluginfw/products/library/resource_library.go | 1 - .../pluginfw/products/qualitymonitor/resource_quality_monitor.go | 1 - internal/providers/pluginfw/products/sharing/resource_share.go | 1 - 3 files changed, 3 deletions(-) diff --git a/internal/providers/pluginfw/products/library/resource_library.go b/internal/providers/pluginfw/products/library/resource_library.go index 0be9f8a933..4d50823c56 100644 --- a/internal/providers/pluginfw/products/library/resource_library.go +++ b/internal/providers/pluginfw/products/library/resource_library.go @@ -113,7 +113,6 @@ func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest } c.SetRequired("cluster_id") c.SetOptional("id") - c.SetOptional("provider_config") c.SetComputed("id") c.SetDeprecated(clusters.EggDeprecationWarning, "egg") return c diff --git a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go index 25a74e1a70..3a4538d961 100644 --- a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go +++ b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go @@ -95,7 +95,6 @@ func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.Schema c.SetOptional("skip_builtin_dashboard") c.SetComputed("id") c.SetOptional("id") - c.SetOptional("provider_config") return c }) resp.Schema = schema.Schema{ diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 9584eb8773..6e0bbc2db6 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -162,7 +162,6 @@ func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, c.SetRequired("object", "partition", "value", "name") c.SetComputed("id") - // c.SetOptional("provider_config") return c }) From 3172c625c55afc314945e7fb20691608f9af78e4 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Mon, 20 Oct 2025 16:07:41 +0530 Subject: [PATCH 19/27] - --- NEXT_CHANGELOG.md | 3 +- .../sharing/resource_share_acc_test.go | 30 +++++++++++++++++-- .../pluginfw/tfschema/unified_provider.go | 5 ++++ 3 files changed, 34 insertions(+), 4 deletions(-) diff --git a/NEXT_CHANGELOG.md b/NEXT_CHANGELOG.md index 89e8a67d0d..2c3360f5b2 100644 --- a/NEXT_CHANGELOG.md +++ b/NEXT_CHANGELOG.md @@ -6,6 +6,8 @@ ### New Features and Improvements +* Add `provider_config` support for SDKv2 compatible plugin framework resources and data sources([#5115](https://github.com/databricks/terraform-provider-databricks/pull/5115)) + ### Bug Fixes ### Documentation @@ -15,4 +17,3 @@ ### Exporter ### Internal Changes -* Add `provider_config` support for SDKv2 compatible plugin framework resources and data sources([#5115](https://github.com/databricks/terraform-provider-databricks/pull/5115)) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index e57f4c4723..1f356268e5 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -626,14 +626,26 @@ func 
shareTemplate(provider_config string) string { `, provider_config) } -func TestAccShare_ProviderConfig_Invalid(t *testing.T) { +func TestAccShare_ProviderConfig_InvalidPlan(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` provider_config { workspace_id = "invalid" } `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*failed to parse workspace_id.*valid integer`), + ExpectError: regexp.MustCompile(`(?s)Attribute provider_config\[0\]\.workspace_id workspace_id must be a valid.*integer, got: invalid`), + PlanOnly: true, + }) +} + +func TestAccShare_ProviderConfig_InvalidApply(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "invalid" + } + `), + ExpectError: regexp.MustCompile(`(?s)Attribute provider_config\[0\]\.workspace_id workspace_id must be a valid.*integer, got: invalid`), }) } @@ -658,7 +670,18 @@ func TestAccShare_ProviderConfig_Required(t *testing.T) { }) } -func TestAccShare_ProviderConfig_EmptyID(t *testing.T) { +func TestAccShare_ProviderConfig_EmptyIDApply(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "" + } + `), + ExpectError: regexp.MustCompile(`Attribute provider_config\[0\]\.workspace_id string length must be at least 1`), + }) +} + +func TestAccShare_ProviderConfig_EmptyIDPlan(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` provider_config { @@ -666,6 +689,7 @@ func TestAccShare_ProviderConfig_EmptyID(t *testing.T) { } `), ExpectError: regexp.MustCompile(`Attribute provider_config\[0\]\.workspace_id string length must be at least 1`), + PlanOnly: true, }) } diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go index 765fc58656..6055cd11f4 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider.go +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -3,6 +3,7 @@ package tfschema import ( "context" "reflect" + "regexp" "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/attr" @@ -36,6 +37,8 @@ func (r ProviderConfig) ApplySchemaCustomizations(attrs map[string]AttributeBuil attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddPlanModifier( stringplanmodifier.RequiresReplaceIf(workspaceIDPlanModifier, "", "")) attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^\d+$`), "workspace_id must be a valid integer")) return attrs } @@ -85,6 +88,8 @@ type ProviderConfigData struct { func (r ProviderConfigData) ApplySchemaCustomizations(attrs map[string]AttributeBuilder) map[string]AttributeBuilder { attrs["workspace_id"] = attrs["workspace_id"].SetRequired() attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator(stringvalidator.LengthAtLeast(1)) + attrs["workspace_id"] = attrs["workspace_id"].(StringAttributeBuilder).AddValidator( + stringvalidator.RegexMatches(regexp.MustCompile(`^\d+$`), "workspace_id must be a valid integer")) return attrs } From 992f7726216a19c7a8787602c10c4e4254d81925 
Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 21 Oct 2025 00:31:38 +0530 Subject: [PATCH 20/27] - --- .../sharing/resource_share_acc_test.go | 39 ++++++------------- 1 file changed, 12 insertions(+), 27 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 1f356268e5..229f6c924a 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -626,26 +626,18 @@ func shareTemplate(provider_config string) string { `, provider_config) } -func TestAccShare_ProviderConfig_InvalidPlan(t *testing.T) { +func TestAccShare_ProviderConfig_Invalid(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` provider_config { workspace_id = "invalid" } `), - ExpectError: regexp.MustCompile(`(?s)Attribute provider_config\[0\]\.workspace_id workspace_id must be a valid.*integer, got: invalid`), - PlanOnly: true, - }) -} - -func TestAccShare_ProviderConfig_InvalidApply(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config { - workspace_id = "invalid" - } - `), - ExpectError: regexp.MustCompile(`(?s)Attribute provider_config\[0\]\.workspace_id workspace_id must be a valid.*integer, got: invalid`), + ExpectError: regexp.MustCompile( + `(?s)Attribute provider_config\[0\]\.workspace_id ` + + `workspace_id must be a valid.*integer, got: invalid`, + ), + PlanOnly: true, }) } @@ -656,7 +648,11 @@ func TestAccShare_ProviderConfig_Mismatched(t *testing.T) { workspace_id = "123" } `), - ExpectError: regexp.MustCompile(`(?s)failed to get workspace client.*workspace_id mismatch.*please check the workspace_id provided in provider_config`), + ExpectError: regexp.MustCompile( + `(?s)failed to get workspace client.*workspace_id mismatch` + + `.*please check the workspace_id provided in ` + + `provider_config`, + ), }) } @@ -670,18 +666,7 @@ func TestAccShare_ProviderConfig_Required(t *testing.T) { }) } -func TestAccShare_ProviderConfig_EmptyIDApply(t *testing.T) { - acceptance.UnityWorkspaceLevel(t, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` - provider_config { - workspace_id = "" - } - `), - ExpectError: regexp.MustCompile(`Attribute provider_config\[0\]\.workspace_id string length must be at least 1`), - }) -} - -func TestAccShare_ProviderConfig_EmptyIDPlan(t *testing.T) { +func TestAccShare_ProviderConfig_EmptyID(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` provider_config { From d27cf4a08354c3fa2caa2d48c09a7f74e9c5cb47 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 21 Oct 2025 17:28:41 +0530 Subject: [PATCH 21/27] - --- docs/resources/library.md | 29 ++++++++++++++++++++++++++++- docs/resources/quality_monitor.md | 3 ++- docs/resources/share.md | 3 ++- 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/docs/resources/library.md b/docs/resources/library.md index 07ae241daf..f1d601474b 100644 --- a/docs/resources/library.md +++ b/docs/resources/library.md @@ -130,7 +130,34 @@ resource "databricks_library" "rkeops" { ## Argument Reference The following arguments are supported: -* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. 
+ +* `cluster_id` - (Required) ID of the [databricks_cluster](cluster.md) to install the library on. + +You must specify exactly **one** of the following library types: + +* `jar` - (Optional) Path to the JAR library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: `/Workspace/path/to/library.jar`, `/Volumes/path/to/library.jar` or `s3://my-bucket/library.jar`. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI. + +* `egg` - (Optional, Deprecated) Path to the EGG library. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above. Use `whl` or `pypi` instead. + +* `whl` - (Optional) Path to the wheel library. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs. For example: `/Workspace/path/to/library.whl`, `/Volumes/path/to/library.whl` or `s3://my-bucket/library.whl`. If S3 is used, make sure the cluster has read access to the library. You may need to launch the cluster with an IAM role to access the S3 URI. + +* `requirements` - (Optional) Path to the requirements.txt file. Only Workspace paths and Unity Catalog Volumes paths are supported. For example: `/Workspace/path/to/requirements.txt` or `/Volumes/path/to/requirements.txt`. Requires a cluster with DBR 15.0+. + +* `maven` - (Optional) Configuration block for a Maven library. The block consists of the following fields: + * `coordinates` - (Required) Gradle-style Maven coordinates. For example: `org.jsoup:jsoup:1.7.2`. + * `repo` - (Optional) Maven repository to install the Maven package from. If omitted, both Maven Central Repository and Spark Packages are searched. + * `exclusions` - (Optional) List of dependencies to exclude. For example: `["slf4j:slf4j", "*:hadoop-client"]`. See [Maven dependency exclusions](https://maven.apache.org/guides/introduction/introduction-to-optional-and-excludes-dependencies.html) for more information. + +* `pypi` - (Optional) Configuration block for a PyPI library. The block consists of the following fields: + * `package` - (Required) The name of the PyPI package to install. An optional exact version specification is also supported. For example: `simplejson` or `simplejson==3.8.0`. + * `repo` - (Optional) The repository where the package can be found. If not specified, the default pip index is used. + +* `cran` - (Optional) Configuration block for a CRAN library. The block consists of the following fields: + * `package` - (Required) The name of the CRAN package to install. + * `repo` - (Optional) The repository where the package can be found. If not specified, the default CRAN repo is used. + +* `provider_config` - (Optional) Configuration block for management through the account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID that the resource belongs to. This workspace must be part of the account that the provider is configured with. ## Import diff --git a/docs/resources/quality_monitor.md b/docs/resources/quality_monitor.md index 1e107db5be..c067a4cdf0 100644 --- a/docs/resources/quality_monitor.md +++ b/docs/resources/quality_monitor.md @@ -120,7 +120,8 @@ table. * `skip_builtin_dashboard` - Whether to skip creating a default dashboard summarizing data quality metrics. (Can't be updated after creation). * `slicing_exprs` - List of column expressions to slice data with for targeted analysis. 
The data is grouped by each expression independently, resulting in a separate slice for each predicate and its complements. For high-cardinality columns, only the top 100 unique values by frequency will generate slices. * `warehouse_id` - Optional argument to specify the warehouse for dashboard creation. If not specified, the first running warehouse will be used. (Can't be updated after creation) -* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ## Attribute Reference diff --git a/docs/resources/share.md b/docs/resources/share.md index 805dac0cc2..94a0f9a123 100644 --- a/docs/resources/share.md +++ b/docs/resources/share.md @@ -85,6 +85,8 @@ The following arguments are required: * `name` - (Required) Name of share. Change forces creation of a new resource. * `owner` - (Optional) User name/group name/sp application_id of the share owner. * `comment` - (Optional) User-supplied free-form text. +* `provider_config` - (Optional) Configure the provider for management through account provider. This block consists of the following fields: + * `workspace_id` - (Required) Workspace ID which the resource belongs to. This workspace must be part of the account which the provider is configured with. ### object Configuration Block @@ -95,7 +97,6 @@ The following arguments are required: * `cdf_enabled` - (Optional) Whether to enable Change Data Feed (cdf) on the shared object. When this field is set, field `history_data_sharing_status` can not be set. * `start_version` - (Optional) The start version associated with the object for cdf. This allows data providers to control the lowest object version that is accessible by clients. * `history_data_sharing_status` - (Optional) Whether to enable history sharing, one of: `ENABLED`, `DISABLED`. When a table has history sharing enabled, recipients can query table data by version, starting from the current table version. If not specified, clients can only query starting from the version of the object at the time it was added to the share. *NOTE*: The start_version should be less than or equal the current version of the object. When this field is set, field `cdf_enabled` can not be set. -* `provider_config` - (Optional) Configure the provider by specifying `workspace_id`. Allows account level provider to manage this resource. To share only part of a table when you add the table to a share, you can provide partition specifications. This is specified by a number of `partition` blocks. Each entry in `partition` block takes a list of `value` blocks. The field is documented below. 
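The documentation refined above centers on an account-level provider managing workspace-scoped resources via a per-resource `workspace_id`. A sketch of that pairing follows, under the assumption that the account-level provider is configured separately with its own authentication; the account host, account ID, and workspace ID shown are placeholders, not values from the patch.

```hcl
# Illustrative sketch only: host, account_id, and workspace_id are placeholders;
# authentication arguments for the account-level provider are omitted.
provider "databricks" {
  alias      = "account"
  host       = "https://accounts.cloud.databricks.com"
  account_id = "00000000-0000-0000-0000-000000000000"
}

resource "databricks_share" "example" {
  provider = databricks.account
  name     = "my-share"

  # Routes API calls for this resource to the given workspace within the account.
  provider_config {
    workspace_id = "1234567890123456"
  }
}
```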
From 2a8f6008c021c0db0c7005149c25fd23083ad2d4 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 21 Oct 2025 17:36:52 +0530 Subject: [PATCH 22/27] - --- .../products/library/resource_library.go | 2 ++ .../qualitymonitor/resource_quality_monitor.go | 2 ++ .../pluginfw/products/sharing/resource_share.go | 4 ++++ .../products/sharing/resource_share_acc_test.go | 16 ++++++++++++++++ 4 files changed, 24 insertions(+) diff --git a/internal/providers/pluginfw/products/library/resource_library.go b/internal/providers/pluginfw/products/library/resource_library.go index 4d50823c56..97688e37c2 100644 --- a/internal/providers/pluginfw/products/library/resource_library.go +++ b/internal/providers/pluginfw/products/library/resource_library.go @@ -17,6 +17,7 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/compute_tf" "github.com/databricks/terraform-provider-databricks/libraries" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -115,6 +116,7 @@ func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest c.SetOptional("id") c.SetComputed("id") c.SetDeprecated(clusters.EggDeprecationWarning, "egg") + c.AddValidator(listvalidator.SizeAtMost(1), "provider_config") return c }) resp.Schema = schema.Schema{ diff --git a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go index 3a4538d961..d356e0bd76 100644 --- a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go +++ b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go @@ -16,6 +16,7 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/catalog_tf" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -95,6 +96,7 @@ func (r *QualityMonitorResource) Schema(ctx context.Context, req resource.Schema c.SetOptional("skip_builtin_dashboard") c.SetComputed("id") c.SetOptional("id") + c.AddValidator(listvalidator.SizeAtMost(1), "provider_config") return c }) resp.Schema = schema.Schema{ diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 6e0bbc2db6..10d48a0364 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -13,6 +13,7 @@ import ( "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters" "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema" "github.com/databricks/terraform-provider-databricks/internal/service/sharing_tf" + "github.com/hashicorp/terraform-plugin-framework-validators/listvalidator" "github.com/hashicorp/terraform-plugin-framework/diag" 
"github.com/hashicorp/terraform-plugin-framework/path" "github.com/hashicorp/terraform-plugin-framework/resource" @@ -163,6 +164,9 @@ func (r *ShareResource) Schema(ctx context.Context, req resource.SchemaRequest, c.SetComputed("id") + // Ensure provider_config list has at most 1 element + c.AddValidator(listvalidator.SizeAtMost(1), "provider_config") + return c }) resp.Schema = schema.Schema{ diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 229f6c924a..2b1a06cf90 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -656,6 +656,22 @@ func TestAccShare_ProviderConfig_Mismatched(t *testing.T) { }) } +func TestAccShare_ProviderConfig_Multiple(t *testing.T) { + acceptance.UnityWorkspaceLevel(t, acceptance.Step{ + Template: preTestTemplateSchema + shareTemplate(` + provider_config { + workspace_id = "123" + } + provider_config { + workspace_id = "456" + } + `), + ExpectError: regexp.MustCompile( + `Attribute provider_config list must contain at most 1 element`, + ), + }) +} + func TestAccShare_ProviderConfig_Required(t *testing.T) { acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` From 0054ef85968c0ca9651fd69c5e87280102319a4d Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 21 Oct 2025 17:37:38 +0530 Subject: [PATCH 23/27] - --- .../pluginfw/products/sharing/resource_share_acc_test.go | 1 + 1 file changed, 1 insertion(+) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 2b1a06cf90..c88885261e 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -669,6 +669,7 @@ func TestAccShare_ProviderConfig_Multiple(t *testing.T) { ExpectError: regexp.MustCompile( `Attribute provider_config list must contain at most 1 element`, ), + PlanOnly: true, }) } From 7333783f4fdb69c5a115d50a33343ab686c62f0c Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Tue, 21 Oct 2025 18:47:44 +0530 Subject: [PATCH 24/27] - --- .../sharing/resource_share_acc_test.go | 39 +++++++++++++------ 1 file changed, 28 insertions(+), 11 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index c88885261e..74e6060d3d 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -5,6 +5,7 @@ import ( "fmt" "maps" "regexp" + "strconv" "testing" "github.com/databricks/databricks-sdk-go" @@ -702,16 +703,20 @@ func TestAccShare_ProviderConfig_NotProvided(t *testing.T) { } func TestAccShare_ProviderConfig_Match(t *testing.T) { - // acceptance.LoadWorkspaceEnv(t) - // get workspace id here from workspace + acceptance.LoadUcwsEnv(t) + ctx := context.Background() + w := databricks.Must(databricks.NewWorkspaceClient()) + workspaceID, err := w.CurrentWorkspaceID(ctx) + require.NoError(t, err) + workspaceIDStr := strconv.FormatInt(workspaceID, 10) acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(""), }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` + 
Template: preTestTemplateSchema + shareTemplate(fmt.Sprintf(` provider_config { - workspace_id = "575821473882772" + workspace_id = "%s" } - `), + `, workspaceIDStr)), ConfigPlanChecks: resource.ConfigPlanChecks{ PreApply: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionUpdate), @@ -721,14 +726,20 @@ func TestAccShare_ProviderConfig_Match(t *testing.T) { } func TestAccShare_ProviderConfig_Recreate(t *testing.T) { + acceptance.LoadUcwsEnv(t) + ctx := context.Background() + w := databricks.Must(databricks.NewWorkspaceClient()) + workspaceID, err := w.CurrentWorkspaceID(ctx) + require.NoError(t, err) + workspaceIDStr := strconv.FormatInt(workspaceID, 10) acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(""), }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` + Template: preTestTemplateSchema + shareTemplate(fmt.Sprintf(` provider_config { - workspace_id = "575821473882772" + workspace_id = "%s" } - `), + `, workspaceIDStr)), }, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(` provider_config { @@ -745,14 +756,20 @@ func TestAccShare_ProviderConfig_Recreate(t *testing.T) { } func TestAccShare_ProviderConfig_Remove(t *testing.T) { + acceptance.LoadUcwsEnv(t) + ctx := context.Background() + w := databricks.Must(databricks.NewWorkspaceClient()) + workspaceID, err := w.CurrentWorkspaceID(ctx) + require.NoError(t, err) + workspaceIDStr := strconv.FormatInt(workspaceID, 10) acceptance.UnityWorkspaceLevel(t, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(""), }, acceptance.Step{ - Template: preTestTemplateSchema + shareTemplate(` + Template: preTestTemplateSchema + shareTemplate(fmt.Sprintf(` provider_config { - workspace_id = "575821473882772" + workspace_id = "%s" } - `), + `, workspaceIDStr)), }, acceptance.Step{ Template: preTestTemplateSchema + shareTemplate(""), ConfigPlanChecks: resource.ConfigPlanChecks{ From 5e3532705912c7bbcbed3978e8fb8a765d21cc00 Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Wed, 22 Oct 2025 18:32:22 +0530 Subject: [PATCH 25/27] - --- .../products/library/resource_library.go | 42 ++++--------- .../resource_quality_monitor.go | 56 +++++------------ .../products/sharing/resource_share.go | 60 +++++++------------ .../pluginfw/tfschema/unified_provider.go | 25 ++++++++ 4 files changed, 73 insertions(+), 110 deletions(-) diff --git a/internal/providers/pluginfw/products/library/resource_library.go b/internal/providers/pluginfw/products/library/resource_library.go index 97688e37c2..21e0ac56ee 100644 --- a/internal/providers/pluginfw/products/library/resource_library.go +++ b/internal/providers/pluginfw/products/library/resource_library.go @@ -140,16 +140,10 @@ func (r *LibraryResource) Create(ctx context.Context, req resource.CreateRequest return } - var workspaceID string - if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, libraryTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -206,16 +200,10 @@ func (r *LibraryResource) Read(ctx context.Context, req resource.ReadRequest, re return } - var workspaceID string - if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, libraryTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -263,16 +251,10 @@ func (r *LibraryResource) Delete(ctx context.Context, req resource.DeleteRequest return } - var workspaceID string - if !libraryTfSDK.ProviderConfig.IsNull() && !libraryTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(libraryTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, libraryTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) diff --git a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go index d356e0bd76..92c8b4011b 100644 --- a/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go +++ b/internal/providers/pluginfw/products/qualitymonitor/resource_quality_monitor.go @@ -124,16 +124,10 @@ func (r *QualityMonitorResource) Create(ctx context.Context, req resource.Create return } - var workspaceID string - if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, monitorInfoTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -182,16 +176,10 @@ func (r *QualityMonitorResource) Read(ctx context.Context, req resource.ReadRequ return } - var workspaceID string - if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, monitorInfoTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -252,16 +240,10 @@ func (r *QualityMonitorResource) Update(ctx context.Context, req resource.Update updateMonitorGoSDK.Schedule.PauseStatus = "" } - var workspaceID string - if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, monitorInfoTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) @@ -305,16 +287,10 @@ func (r *QualityMonitorResource) Delete(ctx context.Context, req resource.Delete return } - var workspaceID string - if !monitorInfoTfSDK.ProviderConfig.IsNull() && !monitorInfoTfSDK.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(monitorInfoTfSDK.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, monitorInfoTfSDK.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) diff --git a/internal/providers/pluginfw/products/sharing/resource_share.go b/internal/providers/pluginfw/products/sharing/resource_share.go index 10d48a0364..14f88aef87 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share.go +++ b/internal/providers/pluginfw/products/sharing/resource_share.go @@ -203,17 +203,12 @@ func (r *ShareResource) Create(ctx context.Context, req resource.CreateRequest, return } - var workspaceID string - if !plan.ProviderConfig.IsNull() && !plan.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(plan.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, plan.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { @@ -280,17 +275,12 @@ func (r *ShareResource) Read(ctx context.Context, req resource.ReadRequest, resp return } - var workspaceID string - if !existingState.ProviderConfig.IsNull() && !existingState.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(existingState.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, existingState.ProviderConfig) + resp.Diagnostics.Append(diags...) 
+ if resp.Diagnostics.HasError() { + return } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { @@ -350,17 +340,12 @@ func (r *ShareResource) Update(ctx context.Context, req resource.UpdateRequest, getShareRequest.Name = state.Name.ValueString() getShareRequest.IncludeSharedData = true - var workspaceID string - if !plan.ProviderConfig.IsNull() && !plan.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(plan.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, plan.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { @@ -456,17 +441,12 @@ func (r *ShareResource) Delete(ctx context.Context, req resource.DeleteRequest, return } - var workspaceID string - if !state.ProviderConfig.IsNull() && !state.ProviderConfig.IsUnknown() { - var namespaceList []tfschema.ProviderConfig - resp.Diagnostics.Append(state.ProviderConfig.ElementsAs(ctx, &namespaceList, true)...) - if resp.Diagnostics.HasError() { - return - } - if len(namespaceList) > 0 { - workspaceID = namespaceList[0].WorkspaceID.ValueString() - } + workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, state.ProviderConfig) + resp.Diagnostics.Append(diags...) + if resp.Diagnostics.HasError() { + return } + w, clientDiags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID) resp.Diagnostics.Append(clientDiags...) if resp.Diagnostics.HasError() { diff --git a/internal/providers/pluginfw/tfschema/unified_provider.go b/internal/providers/pluginfw/tfschema/unified_provider.go index 6055cd11f4..3aff327dd9 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider.go +++ b/internal/providers/pluginfw/tfschema/unified_provider.go @@ -7,6 +7,7 @@ import ( "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" "github.com/hashicorp/terraform-plugin-framework/attr" + "github.com/hashicorp/terraform-plugin-framework/diag" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" @@ -116,3 +117,27 @@ func (r ProviderConfigData) Type(ctx context.Context) attr.Type { }, } } + +// GetWorkspaceID_SdkV2 extracts the workspace ID from a provider_config list (for SdkV2-compatible resources). +// It returns the workspace ID string and any diagnostics encountered during extraction. +// If the provider_config is not set, it returns an empty string with no diagnostics. +func GetWorkspaceID_SdkV2(ctx context.Context, providerConfig types.List) (string, diag.Diagnostics) { + var diags diag.Diagnostics + var workspaceID string + + if providerConfig.IsNull() || providerConfig.IsUnknown() { + return workspaceID, diags + } + + var namespaceList []ProviderConfig + diags.Append(providerConfig.ElementsAs(ctx, &namespaceList, true)...) 
+ if diags.HasError() { + return workspaceID, diags + } + + if len(namespaceList) > 0 { + workspaceID = namespaceList[0].WorkspaceID.ValueString() + } + + return workspaceID, diags +} From 2408567cd52e135479a7262344a03e9f746e1b2f Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Wed, 22 Oct 2025 18:43:17 +0530 Subject: [PATCH 26/27] - --- .../tfschema/unified_provider_test.go | 68 +++++++++++++++++++ 1 file changed, 68 insertions(+) diff --git a/internal/providers/pluginfw/tfschema/unified_provider_test.go b/internal/providers/pluginfw/tfschema/unified_provider_test.go index 3ab1b54da1..cc7bc619ac 100644 --- a/internal/providers/pluginfw/tfschema/unified_provider_test.go +++ b/internal/providers/pluginfw/tfschema/unified_provider_test.go @@ -4,6 +4,7 @@ import ( "context" "testing" + "github.com/hashicorp/terraform-plugin-framework/attr" "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" "github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier" "github.com/hashicorp/terraform-plugin-framework/types" @@ -65,3 +66,70 @@ func TestWorkspaceIDPlanModifier(t *testing.T) { }) } } + +func TestGetWorkspaceID_SdkV2(t *testing.T) { + ctx := context.Background() + + tests := []struct { + name string + setupProviderConfig func() types.List + expectedWorkspaceID string + expectError bool + }{ + { + name: "valid workspace ID", + setupProviderConfig: func() types.List { + providerConfig := ProviderConfig{ + WorkspaceID: types.StringValue("123456789"), + } + return types.ListValueMust( + ProviderConfig{}.Type(ctx), + []attr.Value{providerConfig.ToObjectValue(ctx)}, + ) + }, + expectedWorkspaceID: "123456789", + expectError: false, + }, + { + name: "null provider_config", + setupProviderConfig: func() types.List { + return types.ListNull(ProviderConfig{}.Type(ctx)) + }, + expectedWorkspaceID: "", + expectError: false, + }, + { + name: "unknown provider_config", + setupProviderConfig: func() types.List { + return types.ListUnknown(ProviderConfig{}.Type(ctx)) + }, + expectedWorkspaceID: "", + expectError: false, + }, + { + name: "empty list", + setupProviderConfig: func() types.List { + return types.ListValueMust( + ProviderConfig{}.Type(ctx), + []attr.Value{}, + ) + }, + expectedWorkspaceID: "", + expectError: false, + }, + } + + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + providerConfigList := tt.setupProviderConfig() + workspaceID, diags := GetWorkspaceID_SdkV2(ctx, providerConfigList) + + if tt.expectError { + assert.True(t, diags.HasError(), "Expected diagnostics error") + } else { + assert.False(t, diags.HasError(), "Expected no diagnostics error") + } + assert.Equal(t, tt.expectedWorkspaceID, workspaceID, "Workspace ID mismatch") + }) + } +} From 90aeca30c16866bbb16d2897926735911d092f4f Mon Sep 17 00:00:00 2001 From: Tanmay Rustagi Date: Wed, 22 Oct 2025 20:23:46 +0530 Subject: [PATCH 27/27] - --- .../pluginfw/products/sharing/resource_share_acc_test.go | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go index 74e6060d3d..e82d65f51a 100644 --- a/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go +++ b/internal/providers/pluginfw/products/sharing/resource_share_acc_test.go @@ -747,11 +747,12 @@ func TestAccShare_ProviderConfig_Recreate(t *testing.T) { } `), ConfigPlanChecks: resource.ConfigPlanChecks{ - PreApply: []plancheck.PlanCheck{ + 
PostApplyPreRefresh: []plancheck.PlanCheck{ plancheck.ExpectResourceAction("databricks_share.myshare", plancheck.ResourceActionDestroyBeforeCreate), }, }, - ExpectError: regexp.MustCompile(`failed to validate workspace_id: workspace_id mismatch`), + PlanOnly: true, + ExpectNonEmptyPlan: true, }) }
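
---

Note for reviewers: the resource hunks above all repeat one calling convention (decode plan/state, resolve the workspace via tfschema.GetWorkspaceID_SdkV2, bail on diagnostics, then fetch the workspace client). Below is a minimal sketch of that convention for a hypothetical resource. Everything named widget* and the example package are illustrative only; GetWorkspaceID_SdkV2 and GetWorkspaceClientForUnifiedProviderWithDiagnostics are the real helpers added/used in this series, and the fallback behaviour for an empty workspace ID is assumed from how the touched resources use them.

	// Sketch only: assumes it lives somewhere under the provider's internal tree
	// so it can import the internal tfschema package.
	package example

	import (
		"context"

		"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
		"github.com/hashicorp/terraform-plugin-framework/resource"
		"github.com/hashicorp/terraform-plugin-framework/types"
	)

	// widgetTfSDK is a hypothetical plan struct; the only field relevant to this
	// series is the optional provider_config list.
	type widgetTfSDK struct {
		Name           types.String `tfsdk:"name"`
		ProviderConfig types.List   `tfsdk:"provider_config"`
	}

	// createWidget mirrors the Create bodies touched in this PR, minus the
	// actual SDK call.
	func createWidget(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
		var plan widgetTfSDK
		resp.Diagnostics.Append(req.Plan.Get(ctx, &plan)...)
		if resp.Diagnostics.HasError() {
			return
		}

		// Resolve the target workspace from provider_config. An empty string
		// means the block was not set; the client helper is expected to fall
		// back to the provider-level workspace client in that case.
		workspaceID, diags := tfschema.GetWorkspaceID_SdkV2(ctx, plan.ProviderConfig)
		resp.Diagnostics.Append(diags...)
		if resp.Diagnostics.HasError() {
			return
		}

		// Next step in the real resources (receiver and Client field omitted here):
		//   w, diags := r.Client.GetWorkspaceClientForUnifiedProviderWithDiagnostics(ctx, workspaceID)
		_ = workspaceID
	}

Keeping the extraction in tfschema means the null/unknown/empty-list handling is written and unit-tested once (TestGetWorkspaceID_SdkV2 above) rather than re-copied into every CRUD method of every onboarded resource.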