
Commit 53337d0

Make common.DataResource deterministic (#1279)
1 parent: 0b679a6 · commit: 53337d0

File tree: 7 files changed, +90 -15 lines

  catalog/data_catalogs.go
  catalog/data_schemas.go
  catalog/data_tables.go
  catalog/data_tables_test.go
  common/reflect_resource_test.go
  common/resource.go
  jobs/data_jobs.go


catalog/data_catalogs.go

Lines changed: 3 additions & 2 deletions
@@ -8,10 +8,11 @@ import (
 )
 
 func DataSourceCatalogs() *schema.Resource {
-	var data struct {
+	type catalogsData struct {
 		Ids []string `json:"ids,omitempty" tf:"computed,slice_set"`
 	}
-	return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
+	return common.DataResource(catalogsData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
+		data := e.(*catalogsData)
 		catalogsAPI := NewCatalogsAPI(ctx, c)
 		catalogs, err := catalogsAPI.list()
 		if err != nil {

catalog/data_schemas.go

Lines changed: 3 additions & 2 deletions
@@ -8,11 +8,12 @@ import (
 )
 
 func DataSourceSchemas() *schema.Resource {
-	var data struct {
+	type schemasData struct {
 		CatalogName string   `json:"catalog_name"`
 		Ids         []string `json:"ids,omitempty" tf:"computed,slice_set"`
 	}
-	return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
+	return common.DataResource(schemasData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
+		data := e.(*schemasData)
 		schemasAPI := NewSchemasAPI(ctx, c)
 		schemas, err := schemasAPI.listByCatalog(data.CatalogName)
 		if err != nil {

catalog/data_tables.go

Lines changed: 3 additions & 2 deletions
@@ -8,12 +8,13 @@ import (
 )
 
 func DataSourceTables() *schema.Resource {
-	var data struct {
+	type tablesData struct {
 		CatalogName string   `json:"catalog_name"`
 		SchemaName  string   `json:"schema_name"`
 		Ids         []string `json:"ids,omitempty" tf:"computed,slice_set"`
 	}
-	return common.DataResource(&data, func(ctx context.Context, c *common.DatabricksClient) error {
+	return common.DataResource(tablesData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
+		data := e.(*tablesData)
 		tablesAPI := NewTablesAPI(ctx, c)
 		tables, err := tablesAPI.listTables(data.CatalogName, data.SchemaName)
 		if err != nil {

catalog/data_tables_test.go

Lines changed: 67 additions & 0 deletions
@@ -4,6 +4,9 @@ import (
 	"testing"
 
 	"github.com/databrickslabs/terraform-provider-databricks/qa"
+	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
+	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 func TestTablesData(t *testing.T) {
@@ -34,6 +37,70 @@ func TestTablesData(t *testing.T) {
 	}.ApplyNoError(t)
 }
 
+// https://github.com/databrickslabs/terraform-provider-databricks/issues/1264
+func TestTablesDataIssue1264(t *testing.T) {
+	r := DataSourceTables()
+	d, err := qa.ResourceFixture{
+		Fixtures: []qa.HTTPFixture{
+			{
+				Method:   "GET",
+				Resource: "/api/2.0/unity-catalog/tables/?catalog_name=a&schema_name=b",
+				Response: Tables{
+					Tables: []TableInfo{
+						{
+							Name: "a",
+						},
+						{
+							Name: "b",
+						},
+					},
+				},
+			},
+		},
+		Resource: r,
+		HCL: `
+		catalog_name = "a"
+		schema_name = "b"`,
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+	}.Apply(t)
+	require.NoError(t, err)
+	s := d.Get("ids").(*schema.Set)
+	assert.Equal(t, 2, s.Len())
+	assert.True(t, s.Contains("..a"))
+
+	d, err = qa.ResourceFixture{
+		Fixtures: []qa.HTTPFixture{
+			{
+				Method:   "GET",
+				Resource: "/api/2.0/unity-catalog/tables/?catalog_name=a&schema_name=b",
+				Response: Tables{
+					Tables: []TableInfo{
+						{
+							Name: "c",
+						},
+						{
+							Name: "d",
+						},
+					},
+				},
+			},
+		},
+		Resource: r,
+		HCL: `
+		catalog_name = "a"
+		schema_name = "b"`,
+		Read:        true,
+		NonWritable: true,
+		ID:          "_",
+	}.Apply(t)
+	require.NoError(t, err)
+	s = d.Get("ids").(*schema.Set)
+	assert.Equal(t, 2, s.Len())
+	assert.True(t, s.Contains("..c"))
+}
+
 func TestTablesData_Error(t *testing.T) {
 	qa.ResourceFixture{
 		Fixtures: qa.HTTPFailures,
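
The regression test above drives two reads through the same data source and expects the second read's ids to fully replace the first's. For context, the old DataResource call sites captured a single struct in the read closure, which is the kind of shared state that can make repeated reads non-deterministic. A toy Go sketch of that failure mode (buggyReader and its names are illustrative, not code from the provider):

package main

import "fmt"

// buggyReader mimics the old pattern: one struct is allocated when the
// data source is built, and every read reuses it, so results from an
// earlier read can leak into a later one.
func buggyReader() func(newIds []string) []string {
	var data struct{ Ids []string }
	return func(newIds []string) []string {
		data.Ids = append(data.Ids, newIds...) // accumulates across reads
		return data.Ids
	}
}

func main() {
	read := buggyReader()
	fmt.Println(read([]string{"..a", "..b"})) // [..a ..b]
	fmt.Println(read([]string{"..c", "..d"})) // [..a ..b ..c ..d]: stale ids leak into the second read
}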

common/reflect_resource_test.go

Lines changed: 3 additions & 2 deletions
@@ -583,11 +583,12 @@ func TestDataToReflectValueBypass(t *testing.T) {
 
 func TestDataResource(t *testing.T) {
 	r := func() *schema.Resource {
-		var dto struct {
+		type entry struct {
 			In  string `json:"in"`
 			Out string `json:"out,omitempty" tf:"computed"`
 		}
-		return DataResource(&dto, func(ctx context.Context, c *DatabricksClient) error {
+		return DataResource(entry{}, func(ctx context.Context, e interface{}, c *DatabricksClient) error {
+			dto := e.(*entry)
 			dto.Out = "out: " + dto.In
 			if dto.In == "fail" {
 				return fmt.Errorf("happens")

common/resource.go

Lines changed: 8 additions & 5 deletions
@@ -4,6 +4,7 @@ import (
 	"context"
 	"fmt"
 	"log"
+	"reflect"
 	"regexp"
 	"strings"
 
@@ -175,8 +176,9 @@ func makeEmptyBlockSuppressFunc(name string) func(k, old, new string, d *schema.
 	}
 }
 
-func DataResource(e interface{}, read func(context.Context, *DatabricksClient) error) *schema.Resource {
-	s := StructToSchema(e, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
+func DataResource(sc interface{}, read func(context.Context, interface{}, *DatabricksClient) error) *schema.Resource {
+	// TODO: migrate to go1.18 and get schema from second function argument?..
+	s := StructToSchema(sc, func(m map[string]*schema.Schema) map[string]*schema.Schema { return m })
 	return &schema.Resource{
 		Schema: s,
 		ReadContext: func(ctx context.Context, d *schema.ResourceData, m interface{}) (diags diag.Diagnostics) {
@@ -186,13 +188,14 @@ func DataResource(e interface{}, read func(context.Context, *DatabricksClient) e
 					diags = diag.Errorf("panic: %v", panic)
 				}
 			}()
-			DataToStructPointer(d, s, e)
-			err := read(ctx, m.(*DatabricksClient))
+			ptr := reflect.New(reflect.ValueOf(sc).Type())
+			DataToReflectValue(d, &schema.Resource{Schema: s}, ptr.Elem())
+			err := read(ctx, ptr.Interface(), m.(*DatabricksClient))
 			if err != nil {
 				err = nicerError(ctx, err, "read data")
 				diags = diag.FromErr(err)
 			}
-			StructToData(e, s, d)
+			StructToData(ptr.Elem().Interface(), s, d)
 			d.SetId("_")
 			return
 		},
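
The heart of the fix is in the ReadContext above: instead of writing into a struct the caller allocated once, reflect.New builds a fresh instance of the schema struct on every read. A standalone sketch of that allocation pattern (demoData and freshInstance are illustrative names, not from the provider):

package main

import (
	"fmt"
	"reflect"
)

type demoData struct {
	Ids []string
}

// freshInstance mirrors what DataResource now does per read: take the zero
// value that describes the schema and allocate a brand-new *demoData, so
// repeated reads never share state.
func freshInstance(sc interface{}) interface{} {
	ptr := reflect.New(reflect.ValueOf(sc).Type())
	return ptr.Interface()
}

func main() {
	a := freshInstance(demoData{}).(*demoData)
	b := freshInstance(demoData{}).(*demoData)
	a.Ids = append(a.Ids, "..a")
	fmt.Println(len(a.Ids), len(b.Ids)) // prints "1 0": the second instance starts clean
}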

jobs/data_jobs.go

Lines changed: 3 additions & 2 deletions
@@ -9,10 +9,11 @@ import (
 )
 
 func DataSourceJobs() *schema.Resource {
-	var response struct {
+	type jobsData struct {
 		Ids map[string]string `json:"ids,omitempty" tf:"computed"`
 	}
-	return common.DataResource(&response, func(ctx context.Context, c *common.DatabricksClient) error {
+	return common.DataResource(jobsData{}, func(ctx context.Context, e interface{}, c *common.DatabricksClient) error {
+		response := e.(*jobsData)
 		jobsAPI := NewJobsAPI(ctx, c)
 		list, err := jobsAPI.List()
 		if err != nil {
