Skip to content

Commit b447d40

Browse files
authored
Unify client construction pathways and logging between SDKv2 and Plugin Framework implementations (#4728)
## Changes Currently, the SDKv2 and Plugin Framework provider implementations have slightly different behavior in how the provider is configured. The SDKv2's schema.Schema is constructed using `DefaultFunc`, which allows setting provider-level configuration attributes to have default values based on environment variables. The specific environment variables are taken from struct tags in the Go SDK's Config object. On the other hand, the Plugin Framework's provider.Schema has no support for default values. Eventually, when the config.Config from the Databricks Go SDK is resolved, those attributes will be populated from the environment anyway. Thus, it should be safe to remove these environment variable defaults from the SDKv2 implementation, ensuring a uniform treatment of provider configuration between both implementations. Beyond this, the code to construct the `DatabricksClient` used by each provider was copied, resulting in a small, unexpected divergence between the two. This common code has now been refactored into `internal/providers/client/databricks_client.go` and is used by both implementations. Lastly, the Plugin Framework implementation incorrectly logged which attributes were configured by the user. This makes it challenging to review debug logs and interpret what attributes were actually set by a user in their provider configuration. This PR addresses this, extracting the logic that sets an individual attribute into its own function. ## Tests This should pass existing integration tests. 
Additionally, when running integration tests locally, I can see the following log output: ``` 2025-05-20T10:41:14.603Z [INFO] databricks: (sdkv2) No attributes specified in provider configuration: tf_provider_addr=registry.terraform.io/hashicorp/databricks tf_rpc=ConfigureProvider tf_mux_provider=tf5to6server.v5tov6Server tf_req_id=b6fd5713-ac58-ef1e-ec8e-10ca5ecb8aa4 2025-05-20T10:41:14.604Z [INFO] databricks: (plugin framework) No attributes specified in provider configuration: tf_provider_addr=registry.terraform.io/hashicorp/databricks tf_rpc=ConfigureProvider tf_mux_provider="*proto6server.Server" tf_req_id=b6fd5713-ac58-ef1e-ec8e-10ca5ecb8aa4 ``` which is now accurate as the provider configuration is empty for integration tests.
1 parent d81aa42 commit b447d40

File tree

5 files changed

+148
-136
lines changed

5 files changed

+148
-136
lines changed

NEXT_CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@
99
### Bug Fixes
1010

1111
* Don't fail delete when `databricks_system_schema` can be disabled only by Databricks [#4727](https://github.com/databricks/terraform-provider-databricks/pull/4727)
12+
* Fix debug logging for attributes used to configure the provider ([#4728](https://github.com/databricks/terraform-provider-databricks/pull/4728)).
1213

1314
### Documentation
1415

Lines changed: 67 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,67 @@
1+
package client
2+
3+
import (
4+
"context"
5+
"fmt"
6+
7+
"github.com/databricks/databricks-sdk-go/client"
8+
"github.com/databricks/databricks-sdk-go/config"
9+
"github.com/databricks/terraform-provider-databricks/commands"
10+
"github.com/databricks/terraform-provider-databricks/common"
11+
"github.com/hashicorp/terraform-plugin-log/tflog"
12+
)
13+
14+
// PrepareDatabricksClient makes some common adjustments to the config that apply in all cases
15+
// and returns a ready-to-use Databricks client. This includes:
16+
// - mapping deprecated auth types to their newer counterparts
17+
// - ensuring the config is resolved
18+
// - setting a default retry timeout if not set
19+
// - setting a default HTTP timeout if not set
20+
//
21+
// TODO: this should be colocated with the definition of DatabricksClient in common/client.go, but
22+
// this isn't possible without introducing a circular dependency. Fixing this will require refactoring
23+
// DatabricksClient out of the common package.
24+
func PrepareDatabricksClient(ctx context.Context, cfg *config.Config, configCustomizer func(*config.Config) error) (*common.DatabricksClient, error) {
25+
if cfg.AuthType != "" {
26+
// mapping from previous Google authentication types
27+
// and current authentication types from Databricks Go SDK
28+
oldToNewerAuthType := map[string]string{
29+
"google-creds": "google-credentials",
30+
"google-accounts": "google-id",
31+
"google-workspace": "google-id",
32+
}
33+
newer, ok := oldToNewerAuthType[cfg.AuthType]
34+
if ok {
35+
tflog.Info(ctx, fmt.Sprintf("Changing required auth_type from %s to %s", cfg.AuthType, newer))
36+
cfg.AuthType = newer
37+
}
38+
}
39+
cfg.EnsureResolved()
40+
// Unless set explicitly, the provider will retry indefinitely until context is cancelled
41+
// by either a timeout or interrupt.
42+
if cfg.RetryTimeoutSeconds == 0 {
43+
cfg.RetryTimeoutSeconds = -1
44+
}
45+
// If not set, the default provider timeout is 65 seconds. Most APIs have a server-side timeout of 60 seconds.
46+
// The additional 5 seconds is to account for network latency.
47+
if cfg.HTTPTimeoutSeconds == 0 {
48+
cfg.HTTPTimeoutSeconds = 65
49+
}
50+
if configCustomizer != nil {
51+
err := configCustomizer(cfg)
52+
if err != nil {
53+
return nil, err
54+
}
55+
}
56+
client, err := client.New(cfg)
57+
if err != nil {
58+
return nil, err
59+
}
60+
pc := &common.DatabricksClient{
61+
DatabricksClient: client,
62+
}
63+
pc.WithCommandExecutor(func(ctx context.Context, client *common.DatabricksClient) common.CommandExecutor {
64+
return commands.NewCommandsAPI(ctx, client)
65+
})
66+
return pc, nil
67+
}

internal/providers/pluginfw/pluginfw.go

Lines changed: 69 additions & 78 deletions
Original file line numberDiff line numberDiff line change
@@ -6,22 +6,21 @@ package pluginfw
66
import (
77
"context"
88
"fmt"
9-
"log"
109
"reflect"
1110
"sort"
1211
"strings"
1312

14-
"github.com/databricks/databricks-sdk-go/client"
1513
"github.com/databricks/databricks-sdk-go/config"
16-
"github.com/databricks/terraform-provider-databricks/commands"
1714
"github.com/databricks/terraform-provider-databricks/common"
15+
"github.com/databricks/terraform-provider-databricks/internal/providers/client"
1816
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
1917
"github.com/hashicorp/terraform-plugin-framework/datasource"
2018
"github.com/hashicorp/terraform-plugin-framework/diag"
2119
"github.com/hashicorp/terraform-plugin-framework/path"
2220
"github.com/hashicorp/terraform-plugin-framework/provider"
2321
"github.com/hashicorp/terraform-plugin-framework/provider/schema"
2422
"github.com/hashicorp/terraform-plugin-framework/resource"
23+
"github.com/hashicorp/terraform-plugin-framework/tfsdk"
2524
"github.com/hashicorp/terraform-plugin-framework/types"
2625
"github.com/hashicorp/terraform-plugin-log/tflog"
2726
)
@@ -98,90 +97,82 @@ func providerSchemaPluginFramework() schema.Schema {
9897
}
9998
}
10099

100+
// setAttribute sets the attribute value in the SDK config corresponding to the attribute name in the provider configuration.
101+
// It returns true if the attribute was set, false if it was not set (because it was unknown or null), and a diag.Diagnostics object in case of error.
102+
func (p *DatabricksProviderPluginFramework) setAttribute(
103+
ctx context.Context,
104+
providerConfig tfsdk.Config,
105+
attr config.ConfigAttribute,
106+
cfg *config.Config,
107+
) (bool, diag.Diagnostics) {
108+
var diags diag.Diagnostics
109+
switch attr.Kind {
110+
case reflect.Bool:
111+
var attrValue types.Bool
112+
diags.Append(providerConfig.GetAttribute(ctx, path.Root(attr.Name), &attrValue)...)
113+
if diags.HasError() {
114+
return false, diags
115+
}
116+
if attrValue.IsNull() || attrValue.IsUnknown() {
117+
return false, diags
118+
}
119+
err := attr.Set(cfg, attrValue.ValueBool())
120+
if err != nil {
121+
diags.Append(diag.NewErrorDiagnostic("Failed to set attribute", err.Error()))
122+
return false, diags
123+
}
124+
case reflect.Int:
125+
var attrValue types.Int64
126+
diags.Append(providerConfig.GetAttribute(ctx, path.Root(attr.Name), &attrValue)...)
127+
if diags.HasError() {
128+
return false, diags
129+
}
130+
if attrValue.IsNull() || attrValue.IsUnknown() {
131+
return false, diags
132+
}
133+
err := attr.Set(cfg, int(attrValue.ValueInt64()))
134+
if err != nil {
135+
diags.Append(diag.NewErrorDiagnostic("Failed to set attribute", err.Error()))
136+
return false, diags
137+
}
138+
case reflect.String:
139+
var attrValue types.String
140+
diags.Append(providerConfig.GetAttribute(ctx, path.Root(attr.Name), &attrValue)...)
141+
if diags.HasError() {
142+
return false, diags
143+
}
144+
if attrValue.IsNull() || attrValue.IsUnknown() {
145+
return false, diags
146+
}
147+
err := attr.Set(cfg, attrValue.ValueString())
148+
if err != nil {
149+
diags.Append(diag.NewErrorDiagnostic(fmt.Sprintf("Failed to set attribute: %s", attr.Name), err.Error()))
150+
return false, diags
151+
}
152+
}
153+
return true, diags
154+
}
155+
101156
func (p *DatabricksProviderPluginFramework) configureDatabricksClient(ctx context.Context, req provider.ConfigureRequest, resp *provider.ConfigureResponse) any {
102157
cfg := &config.Config{}
103158
attrsUsed := []string{}
104159
for _, attr := range config.ConfigAttributes {
105-
switch attr.Kind {
106-
case reflect.Bool:
107-
var attrValue types.Bool
108-
diags := req.Config.GetAttribute(ctx, path.Root(attr.Name), &attrValue)
109-
resp.Diagnostics.Append(diags...)
110-
if resp.Diagnostics.HasError() {
111-
return nil
112-
}
113-
err := attr.Set(cfg, attrValue.ValueBool())
114-
if err != nil {
115-
resp.Diagnostics.Append(diag.NewErrorDiagnostic("Failed to set attribute", err.Error()))
116-
return nil
117-
}
118-
case reflect.Int:
119-
var attrValue types.Int64
120-
diags := req.Config.GetAttribute(ctx, path.Root(attr.Name), &attrValue)
121-
resp.Diagnostics.Append(diags...)
122-
if resp.Diagnostics.HasError() {
123-
return nil
124-
}
125-
err := attr.Set(cfg, int(attrValue.ValueInt64()))
126-
if err != nil {
127-
resp.Diagnostics.Append(diag.NewErrorDiagnostic("Failed to set attribute", err.Error()))
128-
return nil
129-
}
130-
case reflect.String:
131-
var attrValue types.String
132-
diags := req.Config.GetAttribute(ctx, path.Root(attr.Name), &attrValue)
133-
resp.Diagnostics.Append(diags...)
134-
if resp.Diagnostics.HasError() {
135-
return nil
136-
}
137-
err := attr.Set(cfg, attrValue.ValueString())
138-
if err != nil {
139-
resp.Diagnostics.AddError(fmt.Sprintf("Failed to set attribute: %s", attr.Name), err.Error())
140-
return nil
141-
}
142-
}
143-
if attr.Kind == reflect.String {
144-
attrsUsed = append(attrsUsed, attr.Name)
145-
}
146-
}
147-
sort.Strings(attrsUsed)
148-
tflog.Info(ctx, fmt.Sprintf("Explicit and implicit attributes: %s", strings.Join(attrsUsed, ", ")))
149-
if cfg.AuthType != "" {
150-
// mapping from previous Google authentication types
151-
// and current authentication types from Databricks Go SDK
152-
oldToNewerAuthType := map[string]string{
153-
"google-creds": "google-credentials",
154-
"google-accounts": "google-id",
155-
"google-workspace": "google-id",
156-
}
157-
newer, ok := oldToNewerAuthType[cfg.AuthType]
160+
ok, diags := p.setAttribute(ctx, req.Config, attr, cfg)
161+
resp.Diagnostics.Append(diags...)
158162
if ok {
159-
log.Printf("[INFO] Changing required auth_type from %s to %s", cfg.AuthType, newer)
160-
cfg.AuthType = newer
163+
attrsUsed = append(attrsUsed, attr.Name)
161164
}
162165
}
163-
// If not set, the default provider timeout is 65 seconds. Most APIs have a server-side timeout of 60 seconds.
164-
// The additional 5 seconds is to account for network latency.
165-
if cfg.HTTPTimeoutSeconds == 0 {
166-
cfg.HTTPTimeoutSeconds = 65
166+
if len(attrsUsed) > 0 {
167+
sort.Strings(attrsUsed)
168+
tflog.Info(ctx, fmt.Sprintf("(plugin framework) Attributes specified in provider configuration: %s", strings.Join(attrsUsed, ", ")))
169+
} else {
170+
tflog.Info(ctx, "(plugin framework) No attributes specified in provider configuration")
167171
}
168-
if p.configCustomizer != nil {
169-
err := p.configCustomizer(cfg)
170-
if err != nil {
171-
resp.Diagnostics.AddError("Failed to customize config", err.Error())
172-
return nil
173-
}
174-
}
175-
client, err := client.New(cfg)
172+
databricksClient, err := client.PrepareDatabricksClient(ctx, cfg, p.configCustomizer)
176173
if err != nil {
177-
resp.Diagnostics.Append(diag.NewErrorDiagnostic(err.Error(), ""))
174+
resp.Diagnostics.AddError("Failed to configure Databricks client", err.Error())
178175
return nil
179176
}
180-
pc := &common.DatabricksClient{
181-
DatabricksClient: client,
182-
}
183-
pc.WithCommandExecutor(func(ctx context.Context, client *common.DatabricksClient) common.CommandExecutor {
184-
return commands.NewCommandsAPI(ctx, client)
185-
})
186-
return pc
177+
return databricksClient
187178
}

internal/providers/providers_test_utils.go

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -118,7 +118,7 @@ func (pf providerFixture) applyWithPluginFramework(t *testing.T) *common.Databri
118118
func (pf providerFixture) applyAssertions(c *common.DatabricksClient, t *testing.T, err error) *common.DatabricksClient {
119119
if pf.assertError != "" {
120120
require.NotNilf(t, err, "Expected to have %s error", pf.assertError)
121-
require.True(t, strings.HasPrefix(err.Error(), pf.assertError),
121+
require.True(t, strings.Contains(err.Error(), pf.assertError),
122122
"Expected to have '%s' error, but got '%s'", pf.assertError, err)
123123
return nil
124124
}
@@ -184,7 +184,7 @@ func (pf providerFixture) configureProviderAndReturnClient_PluginFramework(t *te
184184
if len(diags) > 0 {
185185
issues := []string{}
186186
for _, d := range diags {
187-
issues = append(issues, d.Summary())
187+
issues = append(issues, d.Summary(), d.Detail())
188188
}
189189
return nil, errors.New(strings.Join(issues, ", "))
190190
}

internal/providers/sdkv2/sdkv2.go

Lines changed: 9 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ package sdkv2
66
import (
77
"context"
88
"fmt"
9-
"log"
109
"os"
1110
"reflect"
1211
"regexp"
@@ -18,7 +17,6 @@ import (
1817
"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
1918
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
2019

21-
"github.com/databricks/databricks-sdk-go/client"
2220
"github.com/databricks/databricks-sdk-go/config"
2321
"github.com/databricks/databricks-sdk-go/useragent"
2422

@@ -27,10 +25,10 @@ import (
2725
"github.com/databricks/terraform-provider-databricks/aws"
2826
"github.com/databricks/terraform-provider-databricks/catalog"
2927
"github.com/databricks/terraform-provider-databricks/clusters"
30-
"github.com/databricks/terraform-provider-databricks/commands"
3128
"github.com/databricks/terraform-provider-databricks/common"
3229
"github.com/databricks/terraform-provider-databricks/dashboards"
3330
"github.com/databricks/terraform-provider-databricks/finops"
31+
"github.com/databricks/terraform-provider-databricks/internal/providers/client"
3432
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
3533
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw"
3634
"github.com/databricks/terraform-provider-databricks/jobs"
@@ -317,78 +315,33 @@ func providerSchema() map[string]*schema.Schema {
317315
Sensitive: attr.Sensitive,
318316
}
319317
ps[attr.Name] = fieldSchema
320-
if len(attr.EnvVars) > 0 {
321-
fieldSchema.DefaultFunc = schema.MultiEnvDefaultFunc(attr.EnvVars, nil)
322-
}
323318
}
324-
// TODO: check if still relevant
325-
ps["rate_limit"].DefaultFunc = schema.EnvDefaultFunc("DATABRICKS_RATE_LIMIT", 15)
326-
ps["debug_truncate_bytes"].DefaultFunc = schema.EnvDefaultFunc("DATABRICKS_DEBUG_TRUNCATE_BYTES", 96)
327319
return ps
328320
}
329321

330322
func ConfigureDatabricksClient(ctx context.Context, d *schema.ResourceData, configCustomizer func(*config.Config) error) (any, diag.Diagnostics) {
331323
cfg := &config.Config{}
332324
attrsUsed := []string{}
333-
authsUsed := map[string]bool{}
334325
for _, attr := range config.ConfigAttributes {
335326
if value, ok := d.GetOk(attr.Name); ok {
336327
err := attr.Set(cfg, value)
337328
if err != nil {
338329
return nil, diag.FromErr(err)
339330
}
340-
if attr.Kind == reflect.String {
341-
attrsUsed = append(attrsUsed, attr.Name)
342-
}
343-
if attr.Auth != "" {
344-
authsUsed[attr.Auth] = true
345-
}
346-
}
347-
}
348-
sort.Strings(attrsUsed)
349-
tflog.Info(ctx, fmt.Sprintf("Explicit and implicit attributes: %s", strings.Join(attrsUsed, ", ")))
350-
if cfg.AuthType != "" {
351-
// mapping from previous Google authentication types
352-
// and current authentication types from Databricks Go SDK
353-
oldToNewerAuthType := map[string]string{
354-
"google-creds": "google-credentials",
355-
"google-accounts": "google-id",
356-
"google-workspace": "google-id",
357-
}
358-
newer, ok := oldToNewerAuthType[cfg.AuthType]
359-
if ok {
360-
log.Printf("[INFO] Changing required auth_type from %s to %s", cfg.AuthType, newer)
361-
cfg.AuthType = newer
331+
attrsUsed = append(attrsUsed, attr.Name)
362332
}
363333
}
364-
cfg.EnsureResolved()
365-
// Unless set explicitly, the provider will retry indefinitely until context is cancelled
366-
// by either a timeout or interrupt.
367-
if cfg.RetryTimeoutSeconds == 0 {
368-
cfg.RetryTimeoutSeconds = -1
334+
if len(attrsUsed) > 0 {
335+
sort.Strings(attrsUsed)
336+
tflog.Info(ctx, fmt.Sprintf("(sdkv2) Attributes specified in provider configuration: %s", strings.Join(attrsUsed, ", ")))
337+
} else {
338+
tflog.Info(ctx, "(sdkv2) No attributes specified in provider configuration")
369339
}
370-
// If not set, the default provider timeout is 65 seconds. Most APIs have a server-side timeout of 60 seconds.
371-
// The additional 5 seconds is to account for network latency.
372-
if cfg.HTTPTimeoutSeconds == 0 {
373-
cfg.HTTPTimeoutSeconds = 65
374-
}
375-
if configCustomizer != nil {
376-
err := configCustomizer(cfg)
377-
if err != nil {
378-
return nil, diag.FromErr(err)
379-
}
380-
}
381-
client, err := client.New(cfg)
340+
databricksClient, err := client.PrepareDatabricksClient(ctx, cfg, configCustomizer)
382341
if err != nil {
383342
return nil, diag.FromErr(err)
384343
}
385-
pc := &common.DatabricksClient{
386-
DatabricksClient: client,
387-
}
388-
pc.WithCommandExecutor(func(ctx context.Context, client *common.DatabricksClient) common.CommandExecutor {
389-
return commands.NewCommandsAPI(ctx, client)
390-
})
391-
return pc, nil
344+
return databricksClient, nil
392345
}
393346

394347
type UserAgentExtra struct {

0 commit comments

Comments
 (0)