
Commit 69ed1b0

committed: added basic resources
1 parent dc941fe commit 69ed1b0

9 files changed: +2,498 −0 lines changed
Lines changed: 307 additions & 0 deletions
@@ -0,0 +1,307 @@
// Copyright IBM Corp. 2025 All Rights Reserved.
// Licensed under the Mozilla Public License v2.0

/*
 * IBM OpenAPI Terraform Generator Version: 3.107.1-41b0fbd0-20250825-080732
 */

package vpc

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/terraform-plugin-sdk/v2/diag"
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"

	"github.com/IBM-Cloud/terraform-provider-ibm/ibm/conns"
	"github.com/IBM-Cloud/terraform-provider-ibm/ibm/flex"
	"github.com/IBM/go-sdk-core/v5/core"
	"github.ibm.com/ibmcloud/vpc-go-sdk/vpcv1"
)

// DataSourceIBMIsVolumeJob defines the schema for the ibm_is_volume_job data source.
func DataSourceIBMIsVolumeJob() *schema.Resource {
	return &schema.Resource{
		ReadContext: dataSourceIBMIsVolumeJobRead,

		Schema: map[string]*schema.Schema{
			"volume_id": &schema.Schema{
				Type:        schema.TypeString,
				Required:    true,
				Description: "The volume identifier.",
			},
			"is_volume_job_id": &schema.Schema{
				Type:        schema.TypeString,
				Required:    true,
				Description: "The volume job identifier.",
			},
			"auto_delete": &schema.Schema{
				Type:        schema.TypeBool,
				Computed:    true,
				Description: "Indicates whether this volume job will be automatically deleted after it completes. At present, this is always `false`, but may be modifiable in the future.",
			},
			"completed_at": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The date and time that the volume job was completed. If absent, the volume job has not yet completed.",
			},
			"created_at": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The date and time that the volume job was created.",
			},
			"estimated_completion_at": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The date and time that the volume job is estimated to complete. If absent, the volume job is still queued and has not yet started.",
			},
			"href": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The URL for this volume job.",
			},
			"job_type": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The type of volume job. The enumerated values for this property may [expand](https://cloud.ibm.com/apidocs/vpc#property-value-expansion) in the future.",
			},
			"name": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The name for this volume job. The name must not be used by another volume job for this volume.",
			},
			"resource_type": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The resource type.",
			},
			"started_at": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The date and time that the volume job was started. If absent, the volume job has not yet started.",
			},
			"status": &schema.Schema{
				Type:        schema.TypeString,
				Computed:    true,
				Description: "The status of this volume job: `deleting` (job is being deleted), `failed` (job could not be completed successfully), `queued` (job is queued), `running` (job is in progress), `succeeded` (job was completed successfully), `canceling` (job is being canceled), `canceled` (job is canceled). The enumerated values for this property may [expand](https://cloud.ibm.com/apidocs/vpc#property-value-expansion) in the future.",
			},
			"status_reasons": &schema.Schema{
				Type:        schema.TypeList,
				Computed:    true,
				Description: "The reasons for the current status (if any).",
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"code": &schema.Schema{
							Type:        schema.TypeString,
							Computed:    true,
							Description: "A snake case string succinctly identifying the status reason. The enumerated values for this property may [expand](https://cloud.ibm.com/apidocs/vpc#property-value-expansion) in the future.",
						},
						"message": &schema.Schema{
							Type:        schema.TypeString,
							Computed:    true,
							Description: "An explanation of the status reason.",
						},
						"more_info": &schema.Schema{
							Type:        schema.TypeString,
							Computed:    true,
							Description: "A link to documentation about this status reason.",
						},
					},
				},
			},
			"parameters": &schema.Schema{
				Type:        schema.TypeList,
				Computed:    true,
				Description: "The parameters to use after the volume is migrated.",
				Elem: &schema.Resource{
					Schema: map[string]*schema.Schema{
						"bandwidth": &schema.Schema{
							Type:        schema.TypeInt,
							Computed:    true,
							Description: "The maximum bandwidth (in megabits per second) for the volume. If specified, the volume profile must not have a `bandwidth.type` of `dependent`.",
						},
						"iops": &schema.Schema{
							Type:        schema.TypeInt,
							Computed:    true,
							Description: "The maximum I/O operations per second (IOPS) for this volume. If specified, the volume profile must not have an `iops.type` of `dependent`.",
						},
						"profile": &schema.Schema{
							Type:        schema.TypeList,
							Computed:    true,
							Description: "Identifies a volume profile by a unique property.",
							Elem: &schema.Resource{
								Schema: map[string]*schema.Schema{
									"name": &schema.Schema{
										Type:        schema.TypeString,
										Computed:    true,
										Description: "The globally unique name for this volume profile.",
									},
									"href": &schema.Schema{
										Type:        schema.TypeString,
										Computed:    true,
										Description: "The URL for this volume profile.",
									},
								},
							},
						},
					},
				},
			},
		},
	}
}

// dataSourceIBMIsVolumeJobRead fetches a single volume job and writes its attributes into the Terraform state.
func dataSourceIBMIsVolumeJobRead(context context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics {
	vpcClient, err := meta.(conns.ClientSession).VpcV1()
	if err != nil {
		tfErr := flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_is_volume_job", "read", "initialize-client")
		log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage())
		return tfErr.GetDiag()
	}

	getVolumeJobOptions := &vpcv1.GetVolumeJobOptions{}

	getVolumeJobOptions.SetVolumeID(d.Get("volume_id").(string))
	getVolumeJobOptions.SetID(d.Get("is_volume_job_id").(string))

	volumeJobIntf, _, err := vpcClient.GetVolumeJobWithContext(context, getVolumeJobOptions)
	if err != nil {
		tfErr := flex.TerraformErrorf(err, fmt.Sprintf("GetVolumeJobWithContext failed: %s", err.Error()), "(Data) ibm_is_volume_job", "read")
		log.Printf("[DEBUG]\n%s", tfErr.GetDebugMessage())
		return tfErr.GetDiag()
	}
	volumeJob := volumeJobIntf.(*vpcv1.VolumeJob)

	// The data source ID combines the volume identifier and the job identifier.
	d.SetId(fmt.Sprintf("%s/%s", *getVolumeJobOptions.VolumeID, *getVolumeJobOptions.ID))

	if err = d.Set("auto_delete", volumeJob.AutoDelete); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting auto_delete: %s", err), "(Data) ibm_is_volume_job", "read", "set-auto_delete").GetDiag()
	}

	if !core.IsNil(volumeJob.CompletedAt) {
		if err = d.Set("completed_at", flex.DateTimeToString(volumeJob.CompletedAt)); err != nil {
			return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting completed_at: %s", err), "(Data) ibm_is_volume_job", "read", "set-completed_at").GetDiag()
		}
	}

	if err = d.Set("created_at", flex.DateTimeToString(volumeJob.CreatedAt)); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting created_at: %s", err), "(Data) ibm_is_volume_job", "read", "set-created_at").GetDiag()
	}

	if !core.IsNil(volumeJob.EstimatedCompletionAt) {
		if err = d.Set("estimated_completion_at", flex.DateTimeToString(volumeJob.EstimatedCompletionAt)); err != nil {
			return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting estimated_completion_at: %s", err), "(Data) ibm_is_volume_job", "read", "set-estimated_completion_at").GetDiag()
		}
	}

	if err = d.Set("href", volumeJob.Href); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting href: %s", err), "(Data) ibm_is_volume_job", "read", "set-href").GetDiag()
	}

	if err = d.Set("job_type", volumeJob.JobType); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting job_type: %s", err), "(Data) ibm_is_volume_job", "read", "set-job_type").GetDiag()
	}

	if err = d.Set("name", volumeJob.Name); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting name: %s", err), "(Data) ibm_is_volume_job", "read", "set-name").GetDiag()
	}

	if err = d.Set("resource_type", volumeJob.ResourceType); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting resource_type: %s", err), "(Data) ibm_is_volume_job", "read", "set-resource_type").GetDiag()
	}

	if !core.IsNil(volumeJob.StartedAt) {
		if err = d.Set("started_at", flex.DateTimeToString(volumeJob.StartedAt)); err != nil {
			return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting started_at: %s", err), "(Data) ibm_is_volume_job", "read", "set-started_at").GetDiag()
		}
	}

	if err = d.Set("status", volumeJob.Status); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting status: %s", err), "(Data) ibm_is_volume_job", "read", "set-status").GetDiag()
	}

	// Flatten the list of status reasons into schema-compatible maps.
	statusReasons := []map[string]interface{}{}
	for _, statusReasonsItem := range volumeJob.StatusReasons {
		statusReasonsItemMap, err := DataSourceIBMIsVolumeJobVolumeJobStatusReasonToMap(&statusReasonsItem) // #nosec G601
		if err != nil {
			return flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_is_volume_job", "read", "status_reasons-to-map").GetDiag()
		}
		statusReasons = append(statusReasons, statusReasonsItemMap)
	}
	if err = d.Set("status_reasons", statusReasons); err != nil {
		return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting status_reasons: %s", err), "(Data) ibm_is_volume_job", "read", "set-status_reasons").GetDiag()
	}

	// Migration parameters are only present for migrate-type jobs.
	if !core.IsNil(volumeJob.Parameters) {
		parameters := []map[string]interface{}{}
		parametersMap, err := DataSourceIBMIsVolumeJobVolumeJobTypeMigrateParametersToMap(volumeJob.Parameters)
		if err != nil {
			return flex.DiscriminatedTerraformErrorf(err, err.Error(), "(Data) ibm_is_volume_job", "read", "parameters-to-map").GetDiag()
		}
		parameters = append(parameters, parametersMap)
		if err = d.Set("parameters", parameters); err != nil {
			return flex.DiscriminatedTerraformErrorf(err, fmt.Sprintf("Error setting parameters: %s", err), "(Data) ibm_is_volume_job", "read", "set-parameters").GetDiag()
		}
	}

	return nil
}

// DataSourceIBMIsVolumeJobVolumeJobStatusReasonToMap flattens a VolumeJobStatusReason model into a map.
func DataSourceIBMIsVolumeJobVolumeJobStatusReasonToMap(model *vpcv1.VolumeJobStatusReason) (map[string]interface{}, error) {
	modelMap := make(map[string]interface{})
	modelMap["code"] = *model.Code
	modelMap["message"] = *model.Message
	if model.MoreInfo != nil {
		modelMap["more_info"] = *model.MoreInfo
	}
	return modelMap, nil
}

// DataSourceIBMIsVolumeJobVolumeJobTypeMigrateParametersToMap flattens migration parameters, including the nested profile reference.
func DataSourceIBMIsVolumeJobVolumeJobTypeMigrateParametersToMap(model *vpcv1.VolumeJobTypeMigrateParameters) (map[string]interface{}, error) {
	modelMap := make(map[string]interface{})
	if model.Bandwidth != nil {
		modelMap["bandwidth"] = flex.IntValue(model.Bandwidth)
	}
	if model.Iops != nil {
		modelMap["iops"] = flex.IntValue(model.Iops)
	}
	profileMap, err := DataSourceIBMIsVolumeJobVolumeProfileIdentityToMap(model.Profile)
	if err != nil {
		return modelMap, err
	}
	modelMap["profile"] = []map[string]interface{}{profileMap}
	return modelMap, nil
}

// DataSourceIBMIsVolumeJobVolumeProfileIdentityToMap flattens any supported volume profile identity variant into a map.
func DataSourceIBMIsVolumeJobVolumeProfileIdentityToMap(model vpcv1.VolumeProfileIdentityIntf) (map[string]interface{}, error) {
	if _, ok := model.(*vpcv1.VolumeProfileIdentityByName); ok {
		return DataSourceIBMIsVolumeJobVolumeProfileIdentityByNameToMap(model.(*vpcv1.VolumeProfileIdentityByName))
	} else if _, ok := model.(*vpcv1.VolumeProfileIdentityByHref); ok {
		return DataSourceIBMIsVolumeJobVolumeProfileIdentityByHrefToMap(model.(*vpcv1.VolumeProfileIdentityByHref))
	} else if _, ok := model.(*vpcv1.VolumeProfileIdentity); ok {
		modelMap := make(map[string]interface{})
		model := model.(*vpcv1.VolumeProfileIdentity)
		if model.Name != nil {
			modelMap["name"] = *model.Name
		}
		if model.Href != nil {
			modelMap["href"] = *model.Href
		}
		return modelMap, nil
	} else {
		return nil, fmt.Errorf("Unrecognized vpcv1.VolumeProfileIdentityIntf subtype encountered")
	}
}

// DataSourceIBMIsVolumeJobVolumeProfileIdentityByNameToMap flattens a profile identity referenced by name.
func DataSourceIBMIsVolumeJobVolumeProfileIdentityByNameToMap(model *vpcv1.VolumeProfileIdentityByName) (map[string]interface{}, error) {
	modelMap := make(map[string]interface{})
	modelMap["name"] = *model.Name
	return modelMap, nil
}

// DataSourceIBMIsVolumeJobVolumeProfileIdentityByHrefToMap flattens a profile identity referenced by href.
func DataSourceIBMIsVolumeJobVolumeProfileIdentityByHrefToMap(model *vpcv1.VolumeProfileIdentityByHref) (map[string]interface{}, error) {
	modelMap := make(map[string]interface{})
	modelMap["href"] = *model.Href
	return modelMap, nil
}
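
For context, a minimal Terraform usage sketch follows. It assumes the data source is registered under the name used in the file's error contexts, ibm_is_volume_job, and that the schema above is the full argument surface; the identifier values are placeholders, not real resources.

data "ibm_is_volume_job" "example" {
  # Placeholder IDs for illustration only.
  volume_id        = "example-volume-id"
  is_volume_job_id = "example-volume-job-id"
}

output "volume_job_status" {
  # `status` is one of the computed attributes defined in the schema above.
  value = data.ibm_is_volume_job.example.status
}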
