Skip to content

Commit e8ce7e7

Browse files
[Feature] Library plugin framework migration (#3979)
## Changes <!-- Summary of your changes that are easy to understand --> - Add library resource to plugin framework, not officially migrating because it has suffix `_pluginframework` - Added integration test for creation and importing ## Tests <!-- How is this tested? Please see the checklist below and also describe any other relevant tests --> - [x] `make test` run locally - [x] relevant change in `docs/` folder - [x] covered with integration tests in `internal/acceptance` - [x] relevant acceptance tests are passing - [x] using Go SDK
1 parent 2c6b876 commit e8ce7e7

File tree

3 files changed

+320
-0
lines changed

3 files changed

+320
-0
lines changed

internal/providers/pluginfw/pluginfw.go

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ import (
1616
"github.com/databricks/terraform-provider-databricks/commands"
1717
"github.com/databricks/terraform-provider-databricks/common"
1818
providercommon "github.com/databricks/terraform-provider-databricks/internal/providers/common"
19+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/library"
1920
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/qualitymonitor"
2021
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/resources/volume"
2122

@@ -42,6 +43,7 @@ var _ provider.Provider = (*DatabricksProviderPluginFramework)(nil)
4243
func (p *DatabricksProviderPluginFramework) Resources(ctx context.Context) []func() resource.Resource {
4344
return []func() resource.Resource{
4445
qualitymonitor.ResourceQualityMonitor,
46+
library.ResourceLibrary,
4547
}
4648
}
4749

Lines changed: 226 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,226 @@
1+
package library
2+
3+
import (
4+
"context"
5+
"fmt"
6+
"time"
7+
8+
"github.com/databricks/databricks-sdk-go/service/compute"
9+
"github.com/databricks/terraform-provider-databricks/clusters"
10+
"github.com/databricks/terraform-provider-databricks/common"
11+
pluginfwcommon "github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/common"
12+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/converters"
13+
"github.com/databricks/terraform-provider-databricks/internal/providers/pluginfw/tfschema"
14+
"github.com/databricks/terraform-provider-databricks/internal/service/compute_tf"
15+
"github.com/databricks/terraform-provider-databricks/libraries"
16+
"github.com/hashicorp/terraform-plugin-framework/diag"
17+
"github.com/hashicorp/terraform-plugin-framework/path"
18+
"github.com/hashicorp/terraform-plugin-framework/resource"
19+
"github.com/hashicorp/terraform-plugin-framework/resource/schema"
20+
"github.com/hashicorp/terraform-plugin-framework/resource/schema/objectplanmodifier"
21+
"github.com/hashicorp/terraform-plugin-framework/resource/schema/stringplanmodifier"
22+
"github.com/hashicorp/terraform-plugin-framework/types"
23+
24+
"github.com/databricks/databricks-sdk-go"
25+
)
26+
27+
const libraryDefaultInstallationTimeout = 15 * time.Minute
28+
29+
var _ resource.ResourceWithConfigure = &LibraryResource{}
30+
31+
func ResourceLibrary() resource.Resource {
32+
return &LibraryResource{}
33+
}
34+
35+
func readLibrary(ctx context.Context, w *databricks.WorkspaceClient, waitParams compute.Wait, libraryRep string, libraryExtended *LibraryExtended) diag.Diagnostics {
36+
res, err := libraries.WaitForLibrariesInstalledSdk(ctx, w, waitParams, libraryDefaultInstallationTimeout)
37+
if err != nil {
38+
return diag.Diagnostics{diag.NewErrorDiagnostic("failed to wait for library installation", err.Error())}
39+
}
40+
41+
for _, v := range res.LibraryStatuses {
42+
thisRep := v.Library.String()
43+
if thisRep == libraryRep {
44+
// This is not entirely necessary as we can directly write the fields in the config into the state, because there's no computed field.
45+
diags := converters.GoSdkToTfSdkStruct(ctx, v.Library, libraryExtended)
46+
47+
if diags.HasError() {
48+
return diags
49+
}
50+
51+
libraryExtended.ClusterId = types.StringValue(waitParams.ClusterID)
52+
53+
return nil
54+
}
55+
}
56+
return diag.Diagnostics{diag.NewErrorDiagnostic("failed to find the installed library", fmt.Sprintf("failed to find %s on %s", libraryRep, waitParams.ClusterID))}
57+
}
58+
59+
type LibraryExtended struct {
60+
compute_tf.Library
61+
ClusterId types.String `tfsdk:"cluster_id"`
62+
}
63+
64+
type LibraryResource struct {
65+
Client *common.DatabricksClient
66+
}
67+
68+
func (r *LibraryResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) {
69+
resp.TypeName = "databricks_library_pluginframework"
70+
}
71+
72+
func (r *LibraryResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) {
73+
resp.Schema = schema.Schema{
74+
Description: "Terraform schema for Databricks Library",
75+
Attributes: tfschema.ResourceStructToSchemaMap(LibraryExtended{}, func(c tfschema.CustomizableSchema) tfschema.CustomizableSchema {
76+
// c.AddPlanModifier(stringplanmodifier.RequiresReplace(), "cluster_id")
77+
// c.AddPlanModifier(objectplanmodifier.RequiresReplace(), "cran")
78+
// c.AddPlanModifier(stringplanmodifier.RequiresReplace(), "egg")
79+
// c.AddPlanModifier(stringplanmodifier.RequiresReplace(), "jar")
80+
// c.AddPlanModifier(objectplanmodifier.RequiresReplace(), "maven")
81+
// c.AddPlanModifier(objectplanmodifier.RequiresReplace(), "pypi")
82+
// c.AddPlanModifier(stringplanmodifier.RequiresReplace(), "requirements")
83+
// c.AddPlanModifier(stringplanmodifier.RequiresReplace(), "whl")
84+
for field, attribute := range c.ToAttributeMap() {
85+
switch attribute.(type) {
86+
case tfschema.StringAttributeBuilder:
87+
c.AddPlanModifier(stringplanmodifier.RequiresReplace(), field)
88+
case tfschema.SingleNestedAttributeBuilder:
89+
c.AddPlanModifier(objectplanmodifier.RequiresReplace(), field)
90+
}
91+
}
92+
return c
93+
}),
94+
}
95+
}
96+
97+
func (r *LibraryResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) {
98+
if r.Client == nil {
99+
r.Client = pluginfwcommon.ConfigureResource(req, resp)
100+
}
101+
}
102+
103+
func (r *LibraryResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) {
104+
w, diags := r.Client.GetWorkspaceClient()
105+
resp.Diagnostics.Append(diags...)
106+
if resp.Diagnostics.HasError() {
107+
return
108+
}
109+
var libraryTfSDK LibraryExtended
110+
resp.Diagnostics.Append(req.Plan.Get(ctx, &libraryTfSDK)...)
111+
if resp.Diagnostics.HasError() {
112+
return
113+
}
114+
115+
var libGoSDK compute.Library
116+
resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, libraryTfSDK, &libGoSDK)...)
117+
if resp.Diagnostics.HasError() {
118+
return
119+
}
120+
installLib := compute.InstallLibraries{
121+
Libraries: []compute.Library{libGoSDK},
122+
}
123+
req.Plan.GetAttribute(ctx, path.Root("cluster_id"), &installLib.ClusterId)
124+
err := w.Libraries.Install(ctx, installLib)
125+
if err != nil {
126+
resp.Diagnostics.AddError("failed to install library", err.Error())
127+
return
128+
}
129+
waitParams := compute.Wait{
130+
ClusterID: installLib.ClusterId,
131+
IsRunning: true,
132+
}
133+
libraryRep := libGoSDK.String()
134+
installedLib := LibraryExtended{}
135+
136+
resp.Diagnostics.Append(readLibrary(ctx, w, waitParams, libraryRep, &installedLib)...)
137+
138+
if resp.Diagnostics.HasError() {
139+
return
140+
}
141+
142+
resp.Diagnostics.Append(resp.State.Set(ctx, installedLib)...)
143+
}
144+
145+
func (r *LibraryResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) {
146+
w, diags := r.Client.GetWorkspaceClient()
147+
resp.Diagnostics.Append(diags...)
148+
if resp.Diagnostics.HasError() {
149+
return
150+
}
151+
var libraryTfSDK LibraryExtended
152+
resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...)
153+
if resp.Diagnostics.HasError() {
154+
return
155+
}
156+
var libGoSDK compute.Library
157+
resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, libraryTfSDK, &libGoSDK)...)
158+
if resp.Diagnostics.HasError() {
159+
return
160+
}
161+
clusterId := libraryTfSDK.ClusterId.ValueString()
162+
libraryRep := libGoSDK.String()
163+
installedLib := LibraryExtended{}
164+
waitParams := compute.Wait{
165+
ClusterID: clusterId,
166+
IsRefresh: true,
167+
}
168+
169+
resp.Diagnostics.Append(readLibrary(ctx, w, waitParams, libraryRep, &installedLib)...)
170+
171+
if resp.Diagnostics.HasError() {
172+
return
173+
}
174+
175+
resp.Diagnostics.Append(resp.State.Set(ctx, installedLib)...)
176+
}
177+
178+
func (r *LibraryResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) {
179+
resp.Diagnostics.AddError("failed to update library", "updating library is not supported")
180+
}
181+
182+
func (r *LibraryResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) {
183+
w, diags := r.Client.GetWorkspaceClient()
184+
resp.Diagnostics.Append(diags...)
185+
if resp.Diagnostics.HasError() {
186+
return
187+
}
188+
var libraryTfSDK LibraryExtended
189+
resp.Diagnostics.Append(req.State.Get(ctx, &libraryTfSDK)...)
190+
if resp.Diagnostics.HasError() {
191+
return
192+
}
193+
clusterID := libraryTfSDK.ClusterId.ValueString()
194+
var libGoSDK compute.Library
195+
resp.Diagnostics.Append(converters.TfSdkToGoSdkStruct(ctx, libraryTfSDK, &libGoSDK)...)
196+
if resp.Diagnostics.HasError() {
197+
return
198+
}
199+
libraryRep := libGoSDK.String()
200+
_, err := clusters.StartClusterAndGetInfo(ctx, w, clusterID)
201+
if err != nil {
202+
resp.Diagnostics.AddError("failed to start and get cluster", err.Error())
203+
return
204+
}
205+
cll, err := w.Libraries.ClusterStatusByClusterId(ctx, clusterID)
206+
if err != nil {
207+
resp.Diagnostics.AddError("failed to get libraries", err.Error())
208+
return
209+
}
210+
for _, v := range cll.LibraryStatuses {
211+
if v.Library.String() != libraryRep {
212+
continue
213+
}
214+
err := w.Libraries.Uninstall(ctx, compute.UninstallLibraries{
215+
ClusterId: clusterID,
216+
Libraries: []compute.Library{*v.Library},
217+
})
218+
if err != nil {
219+
resp.Diagnostics.AddError("failed to uninstall library", err.Error())
220+
}
221+
return
222+
}
223+
// Keeping the implementation to be consistent with the sdk-v2 implementation. Eventually we should update this to be not
224+
// an error, for cases such as the library being manually uninstalled.
225+
resp.Diagnostics.AddError("failed to uninstall library", fmt.Sprintf("failed to find %s on %s", libraryRep, clusterID))
226+
}
Lines changed: 92 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,92 @@
1+
package library_test
2+
3+
import (
4+
"testing"
5+
6+
"github.com/databricks/terraform-provider-databricks/internal/acceptance"
7+
)
8+
9+
func TestAccLibraryCreationPluginFramework(t *testing.T) {
10+
acceptance.WorkspaceLevel(t, acceptance.Step{
11+
Template: `data "databricks_spark_version" "latest" {
12+
}
13+
resource "databricks_cluster" "this" {
14+
cluster_name = "test-library-{var.RANDOM}"
15+
spark_version = data.databricks_spark_version.latest.id
16+
instance_pool_id = "{env.TEST_INSTANCE_POOL_ID}"
17+
autotermination_minutes = 10
18+
num_workers = 0
19+
spark_conf = {
20+
"spark.databricks.cluster.profile" = "singleNode"
21+
"spark.master" = "local[*]"
22+
}
23+
custom_tags = {
24+
"ResourceClass" = "SingleNode"
25+
}
26+
}
27+
resource "databricks_library_pluginframework" "new_library" {
28+
cluster_id = databricks_cluster.this.id
29+
pypi = {
30+
repo = "https://pypi.org/dummy"
31+
package = "databricks-sdk"
32+
}
33+
}
34+
`,
35+
})
36+
}
37+
38+
func TestAccLibraryUpdatePluginFramework(t *testing.T) {
39+
acceptance.WorkspaceLevel(t,
40+
acceptance.Step{
41+
Template: `data "databricks_spark_version" "latest" {
42+
}
43+
resource "databricks_cluster" "this" {
44+
cluster_name = "cluster-{var.STICKY_RANDOM}"
45+
spark_version = data.databricks_spark_version.latest.id
46+
instance_pool_id = "{env.TEST_INSTANCE_POOL_ID}"
47+
autotermination_minutes = 10
48+
num_workers = 0
49+
spark_conf = {
50+
"spark.databricks.cluster.profile" = "singleNode"
51+
"spark.master" = "local[*]"
52+
}
53+
custom_tags = {
54+
"ResourceClass" = "SingleNode"
55+
}
56+
}
57+
resource "databricks_library_pluginframework" "new_library" {
58+
cluster_id = databricks_cluster.this.id
59+
pypi = {
60+
repo = "https://pypi.org/simple"
61+
package = "databricks-sdk"
62+
}
63+
}
64+
`,
65+
},
66+
acceptance.Step{
67+
Template: `data "databricks_spark_version" "latest" {
68+
}
69+
resource "databricks_cluster" "this" {
70+
cluster_name = "cluster-{var.STICKY_RANDOM}"
71+
spark_version = data.databricks_spark_version.latest.id
72+
instance_pool_id = "{env.TEST_INSTANCE_POOL_ID}"
73+
autotermination_minutes = 10
74+
num_workers = 0
75+
spark_conf = {
76+
"spark.databricks.cluster.profile" = "singleNode"
77+
"spark.master" = "local[*]"
78+
}
79+
custom_tags = {
80+
"ResourceClass" = "SingleNode"
81+
}
82+
}
83+
resource "databricks_library_pluginframework" "new_library" {
84+
cluster_id = databricks_cluster.this.id
85+
pypi = {
86+
package = "networkx"
87+
}
88+
}
89+
`,
90+
},
91+
)
92+
}

0 commit comments

Comments
 (0)