Commit afcac0f

chore: Implements Read in TPF data source (#2865)
* enable data source test checks
* ds model
* same Read impl for singular ds as resource Read
* Read in plural ds
* use conversion.CopyModel to reduce duplicated code
* check out is not nil
* remove unneeded funcs
* pluralModel
* overrideUsingLegacySchema
* fix condition to use legacy schema
* remove use of config.AdvancedClusterV2Schema in tests
* fix pinned_fcv
* revert test changes
* refactor ds to use readClustersDS and readClusterDS
* enable ds test checks
* refactor plural ds to use ListClusters info
* leftover comment
* revert temporarily overrideUsingLegacySchema
* split convertClusterAddAdvConfig into getBasicClusterModel and updateModelAdvancedConfig
* pagination comment in ListClusters
* check if use_replication_spec_per_shard should be added in ds
* bring changes from master in resource.go
* CopyModel tests
* have resource and datasource models together, renaming resource_schema files to schema
* rename AsymmetricShardUnsupportedError to AsymmetricShardUnsupported
* use AllPages
* wait in applyAdvancedConfigurationChanges
* await after every update
* don't include accept_data_risks_and_force_replica_set_reconfig in ds
* move ds schemas together with resource one
* wait to apply adv_config changes in Create
* leftover
* skip TestAccClusterAdvancedClusterConfig_asymmetricShardedNewSchema
* leftover
1 parent b59c241 commit afcac0f

13 files changed: +425 -124 lines changed

.golangci.yml

Lines changed: 1 addition & 1 deletion

@@ -123,7 +123,7 @@ issues:
     - linters:
         - gocritic
       text: "^hugeParam: req is heavy"
-    - path: "_schema\\.go" # exclude rules for schema files as it's auto-genereated from OpenAPI spec
+    - path: "schema\\.go" # exclude rules for schema files as it's auto-genereated from OpenAPI spec
       text: "fieldalignment|hugeParam|var-naming|ST1003|S1007|exceeds the maximum|too long|regexpSimplify|nolint"
 run:
   timeout: 10m
Lines changed: 41 additions & 0 deletions

@@ -0,0 +1,41 @@
+package conversion
+
+import (
+    "fmt"
+    "reflect"
+)
+
+// CopyModel creates a new struct with the same values as the source struct. Fields in destination struct that are not in source are left with zero value.
+func CopyModel[T any](src any) (*T, error) {
+    dest := new(T)
+    valSrc := reflect.ValueOf(src)
+    valDest := reflect.ValueOf(dest)
+    if valSrc.Kind() != reflect.Ptr || valDest.Kind() != reflect.Ptr {
+        return nil, fmt.Errorf("params must be pointers")
+    }
+    valSrc = valSrc.Elem()
+    valDest = valDest.Elem()
+    if valSrc.Kind() != reflect.Struct || valDest.Kind() != reflect.Struct {
+        return nil, fmt.Errorf("params must be pointers to structs")
+    }
+    typeSrc := valSrc.Type()
+    typeDest := valDest.Type()
+    for i := 0; i < typeDest.NumField(); i++ {
+        fieldDest := typeDest.Field(i)
+        name := fieldDest.Name
+        {
+            fieldSrc, found := typeSrc.FieldByName(name)
+            if !found {
+                continue
+            }
+            if fieldDest.Type != fieldSrc.Type {
+                return nil, fmt.Errorf("field has different type: %s", name)
+            }
+        }
+        if !valDest.Field(i).CanSet() {
+            return nil, fmt.Errorf("field can't be set, probably unexported: %s", name)
+        }
+        valDest.Field(i).Set(valSrc.FieldByName(name))
+    }
+    return dest, nil
+}
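
A minimal usage sketch of CopyModel, assuming two illustrative structs: resourceModel and dsModel below are hypothetical stand-ins for the provider's TFModel and TFModelDS, not types from this commit.

package main

import (
    "fmt"

    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
)

// Hypothetical stand-ins used only for this sketch.
type resourceModel struct {
    ProjectID string
    Name      string
    Timeouts  string // only in the source struct; ignored because CopyModel iterates destination fields
}

type dsModel struct {
    ProjectID                  string
    Name                       string
    UseReplicationSpecPerShard bool // missing in the source struct; left at its zero value
}

func main() {
    src := &resourceModel{ProjectID: "p1", Name: "cluster0", Timeouts: "20m"}
    dst, err := conversion.CopyModel[dsModel](src)
    if err != nil {
        panic(err)
    }
    fmt.Printf("%+v\n", dst) // &{ProjectID:p1 Name:cluster0 UseReplicationSpecPerShard:false}
}

Matching field names are copied only when their types also match; a type mismatch or an unexported destination field returns an error, as exercised by the test file below.
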
Lines changed: 92 additions & 0 deletions

@@ -0,0 +1,92 @@
+package conversion_test
+
+import (
+    "testing"
+
+    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
+    "github.com/stretchr/testify/assert"
+    "github.com/stretchr/testify/require"
+)
+
+func TestCopyModel(t *testing.T) {
+    type destType struct {
+        AttrStr        string
+        attrUnexported string
+        AttrInt        int
+    }
+
+    testCases := map[string]struct {
+        input            any
+        expected         any
+        expectedErrorStr string
+    }{
+        "basic": {
+            input: &struct {
+                AttrStr string
+                AttrInt int
+            }{
+                AttrStr: "val",
+                AttrInt: 1,
+            },
+            expected: &destType{
+                AttrStr:        "val",
+                AttrInt:        1,
+                attrUnexported: "",
+            },
+        },
+        "missing field": {
+            input: &struct {
+                AttrStr string
+            }{
+                AttrStr: "val",
+            },
+            expected: &destType{
+                AttrStr: "val",
+            },
+        },
+        "extra field": {
+            input: &struct {
+                AttrStr   string
+                AttrExtra string
+                AttrInt   int
+            }{
+                AttrStr:   "val",
+                AttrExtra: "extra",
+                AttrInt:   1,
+            },
+            expected: &destType{
+                AttrStr: "val",
+                AttrInt: 1,
+            },
+        },
+        "different type": {
+            input: &struct {
+                AttrStr bool
+            }{
+                AttrStr: true,
+            },
+            expectedErrorStr: "field has different type: AttrStr",
+        },
+        "unexported": {
+            input: &struct {
+                attrUnexported string
+            }{
+                attrUnexported: "val",
+            },
+            expectedErrorStr: "field can't be set, probably unexported: attrUnexported",
+        },
+    }
+    for name, tc := range testCases {
+        t.Run(name, func(t *testing.T) {
+            dest, err := conversion.CopyModel[destType](tc.input)
+            if err == nil {
+                assert.Equal(t, tc.expected, dest)
+                assert.Equal(t, "", tc.expectedErrorStr)
+            } else {
+                require.ErrorContains(t, err, tc.expectedErrorStr)
+                assert.Nil(t, dest)
+                assert.Nil(t, tc.expected)
+            }
+        })
+    }
+}

internal/service/advancedcluster/resource_advanced_cluster_test.go

Lines changed: 2 additions & 0 deletions

@@ -706,6 +706,8 @@ func TestAccClusterAdvancedClusterConfig_symmetricShardedNewSchemaToAsymmetricAd
 }
 
 func TestAccClusterAdvancedClusterConfig_asymmetricShardedNewSchema(t *testing.T) {
+    // TODO: enable when datasource attribute use_replication_spec_per_shard is used
+    acc.SkipIfAdvancedClusterV2Schema(t)
     resource.ParallelTest(t, asymmetricShardedNewSchemaTestCase(t, true))
 }
 
internal/service/advancedclustertpf/data_source.go

Lines changed: 51 additions & 15 deletions

@@ -2,12 +2,13 @@ package advancedclustertpf
 
 import (
     "context"
+    "fmt"
 
     "github.com/hashicorp/terraform-plugin-framework/datasource"
-    "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
-    "github.com/hashicorp/terraform-plugin-framework/types"
+    "github.com/hashicorp/terraform-plugin-framework/diag"
     "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
     "github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
+    "go.mongodb.org/atlas-sdk/v20241113003/admin"
 )
 
 var _ datasource.DataSource = &ds{}
@@ -26,22 +27,57 @@ type ds struct {
 }
 
 func (d *ds) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
-    resp.Schema = conversion.DataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.DataSourceSchemaRequest{
-        RequiredFields: []string{"project_id", "name"},
-        OverridenFields: map[string]schema.Attribute{
-            "use_replication_spec_per_shard": schema.BoolAttribute{ // TODO: added as in current resource
-                Optional:            true,
-                MarkdownDescription: "use_replication_spec_per_shard", // TODO: add documentation
-            },
-        },
-    })
+    resp.Schema = dataSourceSchema(ctx)
 }
 
 func (d *ds) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+    var state TFModelDS
+    diags := &resp.Diagnostics
+    diags.Append(req.Config.Get(ctx, &state)...)
+    if diags.HasError() {
+        return
+    }
+    model := d.readCluster(ctx, diags, &state)
+    if model != nil {
+        diags.Append(resp.State.Set(ctx, model)...)
+    }
 }
 
-// TODO: see if resource model can be used instead, probably different only in timeouts
-type ModelDS struct {
-    ProjectID types.String `tfsdk:"project_id"`
-    Name      types.String `tfsdk:"name"`
+func (d *ds) readCluster(ctx context.Context, diags *diag.Diagnostics, modelDS *TFModelDS) *TFModelDS {
+    clusterName := modelDS.Name.ValueString()
+    projectID := modelDS.ProjectID.ValueString()
+    useReplicationSpecPerShard := modelDS.UseReplicationSpecPerShard.ValueBool()
+    api := d.Client.AtlasV2.ClustersApi
+    clusterResp, _, err := api.GetCluster(ctx, projectID, clusterName).Execute()
+    if err != nil {
+        if admin.IsErrorCode(err, ErrorCodeClusterNotFound) {
+            return nil
+        }
+        diags.AddError("errorRead", fmt.Sprintf(errorRead, clusterName, err.Error()))
+        return nil
+    }
+    modelIn := &TFModel{
+        ProjectID: modelDS.ProjectID,
+        Name:      modelDS.Name,
+    }
+    // TODO: pass !UseReplicationSpecPerShard to overrideUsingLegacySchema
+    modelOut, extraInfo := getBasicClusterModel(ctx, diags, d.Client, clusterResp, modelIn)
+    if diags.HasError() {
+        return nil
+    }
+    if extraInfo.AsymmetricShardUnsupported && !useReplicationSpecPerShard {
+        diags.AddError("errorRead", "Please add `use_replication_spec_per_shard = true` to your data source configuration to enable asymmetric shard support. Refer to documentation for more details.")
+        return nil
+    }
+    updateModelAdvancedConfig(ctx, diags, d.Client, modelOut, nil, nil)
+    if diags.HasError() {
+        return nil
+    }
+    modelOutDS, err := conversion.CopyModel[TFModelDS](modelOut)
+    if err != nil {
+        diags.AddError(errorRead, fmt.Sprintf("error setting model: %s", err.Error()))
+        return nil
+    }
+    modelOutDS.UseReplicationSpecPerShard = modelDS.UseReplicationSpecPerShard // attrs not in resource model
+    return modelOutDS
 }

internal/service/advancedclustertpf/model_ClusterDescription20240805.go

Lines changed: 1 addition & 0 deletions

@@ -25,6 +25,7 @@ type ExtraAPIInfo struct {
     RootDiskSize               *float64
     ContainerIDs               map[string]string
     UsingLegacySchema          bool
+    AsymmetricShardUnsupported bool
 }
 
 func NewTFModel(ctx context.Context, input *admin.ClusterDescription20240805, timeout timeouts.Value, diags *diag.Diagnostics, apiInfo ExtraAPIInfo) *TFModel {

internal/service/advancedclustertpf/plural_data_source.go

Lines changed: 63 additions & 18 deletions

@@ -3,12 +3,15 @@ package advancedclustertpf
 import (
     "context"
     "fmt"
+    "net/http"
 
     "github.com/hashicorp/terraform-plugin-framework/datasource"
-    "github.com/hashicorp/terraform-plugin-framework/datasource/schema"
+    "github.com/hashicorp/terraform-plugin-framework/diag"
     "github.com/hashicorp/terraform-plugin-framework/types"
     "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/conversion"
+    "github.com/mongodb/terraform-provider-mongodbatlas/internal/common/dsschema"
     "github.com/mongodb/terraform-provider-mongodbatlas/internal/config"
+    "go.mongodb.org/atlas-sdk/v20241113003/admin"
 )
 
 var _ datasource.DataSource = &pluralDS{}
@@ -27,26 +30,68 @@ type pluralDS struct {
 }
 
 func (d *pluralDS) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) {
-    resp.Schema = conversion.PluralDataSourceSchemaFromResource(ResourceSchema(ctx), &conversion.PluralDataSourceSchemaRequest{
-        RequiredFields: []string{"project_id"},
-        OverridenRootFields: map[string]schema.Attribute{
-            "use_replication_spec_per_shard": schema.BoolAttribute{ // TODO: added as in current resource
-                Optional:            true,
-                MarkdownDescription: "use_replication_spec_per_shard", // TODO: add documentation
-            },
-            "include_deleted_with_retained_backups": schema.BoolAttribute{ // TODO: not in current resource, decide if keep
-                Optional:            true,
-                MarkdownDescription: "Flag that indicates whether to return Clusters with retain backups.",
-            },
-        },
-    })
+    resp.Schema = pluralDataSourceSchema(ctx)
 }
 
 func (d *pluralDS) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) {
+    var state TFModelPluralDS
+    diags := &resp.Diagnostics
+    diags.Append(req.Config.Get(ctx, &state)...)
+    if diags.HasError() {
+        return
+    }
+    model := d.readClusters(ctx, diags, &state)
+    if model != nil {
+        diags.Append(resp.State.Set(ctx, model)...)
+    }
 }
 
-type AdvancedClustersModel struct {
-    ProjectID                         types.String `tfsdk:"project_id"`
-    UseReplicationSpecPerShard        types.Bool   `tfsdk:"use_replication_spec_per_shard"`        // TODO: added as in current resource
-    IncludeDeletedWithRetainedBackups types.Bool   `tfsdk:"include_deleted_with_retained_backups"` // TODO: not in current resource, decide if keep
+func (d *pluralDS) readClusters(ctx context.Context, diags *diag.Diagnostics, pluralModel *TFModelPluralDS) *TFModelPluralDS {
+    projectID := pluralModel.ProjectID.ValueString()
+    useReplicationSpecPerShard := pluralModel.UseReplicationSpecPerShard.ValueBool()
+    api := d.Client.AtlasV2.ClustersApi
+    params := admin.ListClustersApiParams{
+        GroupId: projectID,
+    }
+    list, err := dsschema.AllPages(ctx, func(ctx context.Context, pageNum int) (dsschema.PaginateResponse[admin.ClusterDescription20240805], *http.Response, error) {
+        request := api.ListClustersWithParams(ctx, &params)
+        request = request.PageNum(pageNum)
+        return request.Execute()
+    })
+    if err != nil {
+        diags.AddError("errorList", fmt.Sprintf(errorList, projectID, err.Error()))
+        return nil
+    }
+    outs := &TFModelPluralDS{
+        ProjectID:                         pluralModel.ProjectID,
+        UseReplicationSpecPerShard:        pluralModel.UseReplicationSpecPerShard,
+        IncludeDeletedWithRetainedBackups: pluralModel.IncludeDeletedWithRetainedBackups,
+    }
+    for i := range list {
+        clusterResp := &list[i]
+        modelIn := &TFModel{
+            ProjectID: pluralModel.ProjectID,
+            Name:      types.StringValue(clusterResp.GetName()),
+        }
+        // TODO: pass !UseReplicationSpecPerShard to overrideUsingLegacySchema
+        modelOut, extraInfo := getBasicClusterModel(ctx, diags, d.Client, clusterResp, modelIn)
+        if diags.HasError() {
+            return nil
+        }
+        if extraInfo.AsymmetricShardUnsupported && !useReplicationSpecPerShard {
+            continue
+        }
+        updateModelAdvancedConfig(ctx, diags, d.Client, modelOut, nil, nil)
+        if diags.HasError() {
+            return nil
+        }
+        modelOutDS, err := conversion.CopyModel[TFModelDS](modelOut)
+        if err != nil {
+            diags.AddError(errorList, fmt.Sprintf("error setting model: %s", err.Error()))
+            return nil
+        }
+        modelOutDS.UseReplicationSpecPerShard = pluralModel.UseReplicationSpecPerShard // attrs not in resource model
+        outs.Results = append(outs.Results, modelOutDS)
+    }
+    return outs
 }
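
For orientation, a sketch of the pagination helper shape implied by the dsschema.AllPages call above. The PaginateResponse interface and the termination logic are inferred from the call site and the SDK's paginated responses (GetResults/GetTotalCount); they are an illustration, not the provider's actual dsschema implementation.

package dsschema

import (
    "context"
    "net/http"
)

// PaginateResponse is the minimal surface the helper needs from one page of results
// (an assumption based on the SDK's paginated models).
type PaginateResponse[T any] interface {
    GetResults() []T
    GetTotalCount() int
}

// AllPages requests page after page and accumulates every item into one slice,
// stopping once the reported total count is reached or a page comes back empty.
func AllPages[T any](ctx context.Context, call func(ctx context.Context, pageNum int) (PaginateResponse[T], *http.Response, error)) ([]T, error) {
    var all []T
    for pageNum := 1; ; pageNum++ {
        resp, _, err := call(ctx, pageNum)
        if err != nil {
            return nil, err
        }
        results := resp.GetResults()
        all = append(all, results...)
        if len(results) == 0 || len(all) >= resp.GetTotalCount() {
            return all, nil
        }
    }
}

readClusters then iterates the flattened slice, converting each cluster the same way the singular data source does and skipping asymmetric-shard clusters unless use_replication_spec_per_shard is set.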
