
Commit 5865797

Migrate databricks_catalogs data to Go SDK (#2038)
This PR also introduces `common.WorkspaceData` to reduce code boilerplate.
1 parent c7cd201 commit 5865797

4 files changed: +59 -13 lines changed


catalog/data_catalogs.go

Lines changed: 5 additions & 7 deletions
@@ -3,22 +3,20 @@ package catalog
 import (
     "context"
 
+    "github.com/databricks/databricks-sdk-go"
     "github.com/databricks/terraform-provider-databricks/common"
     "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
 )
 
 func DataSourceCatalogs() *schema.Resource {
-    type catalogsData struct {
+    return common.WorkspaceData(func(ctx context.Context, data *struct {
         Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
-    }
-    return common.DataResource(catalogsData{}, func(ctx context.Context, e any, c *common.DatabricksClient) error {
-        data := e.(*catalogsData)
-        catalogsAPI := NewCatalogsAPI(ctx, c)
-        catalogs, err := catalogsAPI.list()
+    }, w *databricks.WorkspaceClient) error {
+        catalogs, err := w.Catalogs.ListAll(ctx)
         if err != nil {
             return err
         }
-        for _, v := range catalogs.Catalogs {
+        for _, v := range catalogs {
             data.Ids = append(data.Ids, v.Name)
         }
         return nil
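The data source now goes through the Go SDK's WorkspaceClient instead of the provider's hand-rolled list() call. For reference, a minimal standalone sketch of the same SDK call (not part of this commit; it assumes workspace credentials are supplied via the usual environment variables such as DATABRICKS_HOST and DATABRICKS_TOKEN or a configured profile):

package main

import (
    "context"
    "fmt"

    "github.com/databricks/databricks-sdk-go"
)

func main() {
    ctx := context.Background()
    // NewWorkspaceClient picks up workspace credentials from the environment or a config profile.
    w, err := databricks.NewWorkspaceClient()
    if err != nil {
        panic(err)
    }
    // Same call the data source makes: ListAll unwraps the list response into a slice of catalogs.
    catalogs, err := w.Catalogs.ListAll(ctx)
    if err != nil {
        panic(err)
    }
    for _, c := range catalogs {
        fmt.Println(c.Name)
    }
}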

catalog/data_catalogs_test.go

Lines changed: 3 additions & 1 deletion
@@ -28,7 +28,9 @@ func TestCatalogsData(t *testing.T) {
         Read:        true,
         NonWritable: true,
         ID:          "_",
-    }.ApplyNoError(t)
+    }.ApplyAndExpectData(t, map[string]any{
+        "ids": []string{"a", "b"},
+    })
 }
 
 func TestCatalogsData_Error(t *testing.T) {
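The hunk above only shows the assertion change. For orientation, a sketch of how a full test built on the provider's qa.ResourceFixture helper might look; the HTTP fixture below, including the API path and response shape, is an assumption and is not taken from this commit:

package catalog

import (
    "testing"

    "github.com/databricks/terraform-provider-databricks/qa"
)

func TestCatalogsData_Sketch(t *testing.T) {
    qa.ResourceFixture{
        Fixtures: []qa.HTTPFixture{
            {
                Method: "GET",
                // the exact path the Go SDK requests for listing catalogs is assumed here
                Resource: "/api/2.1/unity-catalog/catalogs",
                Response: map[string]any{
                    "catalogs": []map[string]any{
                        {"name": "a"},
                        {"name": "b"},
                    },
                },
            },
        },
        Resource:    DataSourceCatalogs(),
        Read:        true,
        NonWritable: true,
        ID:          "_",
    }.ApplyAndExpectData(t, map[string]any{
        "ids": []string{"a", "b"},
    })
}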

catalog/resource_catalog.go

Lines changed: 0 additions & 5 deletions
@@ -32,11 +32,6 @@ type Catalogs struct {
     Catalogs []CatalogInfo `json:"catalogs"`
 }
 
-func (a CatalogsAPI) list() (catalogs Catalogs, err error) {
-    err = a.client.Get(a.context, "/unity-catalog/catalogs", nil, &catalogs)
-    return
-}
-
 func (a CatalogsAPI) createCatalog(ci *CatalogInfo) error {
     return a.client.Post(a.context, "/unity-catalog/catalogs", ci, ci)
 }

common/resource.go

Lines changed: 51 additions & 0 deletions
@@ -8,6 +8,7 @@ import (
     "regexp"
     "strings"
 
+    "github.com/databricks/databricks-sdk-go"
     "github.com/databricks/databricks-sdk-go/apierr"
     "github.com/hashicorp/terraform-plugin-sdk/v2/diag"
     "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
@@ -182,6 +183,7 @@ func makeEmptyBlockSuppressFunc(name string) func(k, old, new string, d *schema.
     }
 }
 
+// Deprecated: migrate to WorkspaceData
 func DataResource(sc any, read func(context.Context, any, *DatabricksClient) error) *schema.Resource {
     // TODO: migrate to go1.18 and get schema from second function argument?..
     s := StructToSchema(sc, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
@@ -213,3 +215,52 @@ func DataResource(sc any, read func(context.Context, any, *DatabricksClient) err
         },
     }
 }
+
+// WorkspaceData is a generic way to define data resources in Terraform provider.
+//
+// Example usage:
+//
+//	type catalogsData struct {
+//		Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
+//	}
+//	return common.WorkspaceData(func(ctx context.Context, data *catalogsData, w *databricks.WorkspaceClient) error {
+//		catalogs, err := w.Catalogs.ListAll(ctx)
+//		...
+//	})
+func WorkspaceData[T any](read func(context.Context, *T, *databricks.WorkspaceClient) error) *schema.Resource {
+    var dummy T
+    s := StructToSchema(dummy, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
+    return &schema.Resource{
+        Schema: s,
+        ReadContext: func(ctx context.Context, d *schema.ResourceData, m any) (diags diag.Diagnostics) {
+            defer func() {
+                // using recoverable() would cause more complex rewrapping of DataToStructPointer & StructToData
+                if panic := recover(); panic != nil {
+                    diags = diag.Errorf("panic: %v", panic)
+                }
+            }()
+            ptr := reflect.New(reflect.ValueOf(dummy).Type())
+            DataToReflectValue(d, &schema.Resource{Schema: s}, ptr.Elem())
+            client := m.(*DatabricksClient)
+            w, err := client.WorkspaceClient()
+            if err != nil {
+                err = nicerError(ctx, err, "read data")
+                return diag.FromErr(err)
+            }
+            err = read(ctx, ptr.Interface().(*T), w)
+            if err != nil {
+                err = nicerError(ctx, err, "read data")
+                diags = diag.FromErr(err)
+            }
+            StructToData(ptr.Elem().Interface(), s, d)
+            // check if the resource schema has the `id` attribute (marked with `json:"id"` in the provided structure).
+            // and if yes, then use it as resource ID. If not, then use default value for resource ID (`_`)
+            if _, ok := s["id"]; ok {
+                d.SetId(d.Get("id").(string))
+            } else {
+                d.SetId("_")
+            }
+            return
+        },
+    }
+}
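Since WorkspaceData returns an ordinary *schema.Resource, data sources built with it register like any other. A minimal sketch of that wiring (illustrative only; the provider's real registration map lives elsewhere in the repository):

package provider

import (
    "github.com/databricks/terraform-provider-databricks/catalog"
    "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

// exampleProvider registers the WorkspaceData-backed data source under its
// Terraform name; nothing about registration changes with the new helper.
func exampleProvider() *schema.Provider {
    return &schema.Provider{
        DataSourcesMap: map[string]*schema.Resource{
            "databricks_catalogs": catalog.DataSourceCatalogs(),
        },
    }
}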
