
Commit 9add4d3

Add Terraform support for dataplex recommendation api (#13959) (#23255)
[upstream:e5e629b14584988fbf87cb39478e0e344083be91] Signed-off-by: Modular Magician <[email protected]>
1 parent: 0e5c558 · commit: 9add4d3

File tree: 5 files changed, +612 -0 lines changed


.changelog/13959.txt

Lines changed: 3 additions & 0 deletions

@@ -0,0 +1,3 @@
```release-note:new-datasource
`google_dataplex_data_quality_rules`
```

google/provider/provider_mmv1_resources.go

Lines changed: 1 addition & 0 deletions

@@ -281,6 +281,7 @@ var handwrittenDatasources = map[string]*schema.Resource{
   "google_container_engine_versions":     container.DataSourceGoogleContainerEngineVersions(),
   "google_container_registry_image":      containeranalysis.DataSourceGoogleContainerImage(),
   "google_container_registry_repository": containeranalysis.DataSourceGoogleContainerRepo(),
+  "google_dataplex_data_quality_rules":   dataplex.DataSourceDataplexDataQualityRules(),
   "google_dataproc_metastore_service":    dataprocmetastore.DataSourceDataprocMetastoreService(),
   "google_datastream_static_ips":         datastream.DataSourceGoogleDatastreamStaticIps(),
   "google_dns_keys":                      dns.DataSourceDNSKeys(),
google/services/dataplex/data_source_dataplex_data_quality_rules.go

Lines changed: 350 additions & 0 deletions

@@ -0,0 +1,350 @@
// Copyright (c) HashiCorp, Inc.
// SPDX-License-Identifier: MPL-2.0
// ----------------------------------------------------------------------------
//
// *** AUTO GENERATED CODE *** Type: Handwritten ***
//
// ----------------------------------------------------------------------------
//
// This code is generated by Magic Modules using the following:
//
// Source file: https://github.com/GoogleCloudPlatform/magic-modules/tree/main/mmv1/third_party/terraform/services/dataplex/data_source_dataplex_data_quality_rules.go
//
// DO NOT EDIT this file directly. Any changes made to this file will be
// overwritten during the next generation cycle.
//
// ----------------------------------------------------------------------------
package dataplex

import (
  "fmt"
  "strings"
  "unicode"

  "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
  "github.com/hashicorp/terraform-provider-google/google/tpgresource"
  transport_tpg "github.com/hashicorp/terraform-provider-google/google/transport"
)

func DataSourceDataplexDataQualityRules() *schema.Resource {
  return &schema.Resource{
    Read: dataSourceDataplexDataQualityRulesRead,
    Schema: map[string]*schema.Schema{
      "project": {
        Type:     schema.TypeString,
        Optional: true,
      },
      "location": {
        Type:     schema.TypeString,
        Optional: true,
      },
      "data_scan_id": {
        Type:     schema.TypeString,
        Required: true,
      },
      "rules": {
        Type:     schema.TypeList,
        Computed: true,
        Elem: &schema.Resource{
          Schema: map[string]*schema.Schema{
            "column": {
              Type:        schema.TypeString,
              Computed:    true,
              Description: `The unnested column which this rule is evaluated against.`,
            },
            "ignore_null": {
              Type:     schema.TypeBool,
              Computed: true,
              Description: `Rows with null values will automatically fail a rule, unless ignoreNull is true. In that case, such null rows are trivially considered passing.
This field is only valid for the following type of rules: RangeExpectation, RegexExpectation, SetExpectation, UniquenessExpectation`,
            },
            "dimension": {
              Type:        schema.TypeString,
              Computed:    true,
              Description: `The dimension a rule belongs to. Supported dimensions are "COMPLETENESS", "ACCURACY", "CONSISTENCY", "VALIDITY", "UNIQUENESS", "FRESHNESS", "VOLUME"`,
            },
            "threshold": {
              Type:        schema.TypeFloat,
              Computed:    true,
              Description: `The minimum ratio of passing_rows / total_rows required to pass this rule, with a range of [0.0, 1.0]. 0 indicates default value (i.e. 1.0). This field is only valid for row-level type rules.`,
            },
            "name": {
              Type:     schema.TypeString,
              Computed: true,
              Description: `A mutable name for the rule.
The name must contain only letters (a-z, A-Z), numbers (0-9), or hyphens (-).
The maximum length is 63 characters.
Must start with a letter.
Must end with a number or a letter.`,
            },
            "description": {
              Type:        schema.TypeString,
              Computed:    true,
              Description: `Description of the rule. (The maximum length is 1,024 characters.)`,
            },
            "suspended": {
              Type:        schema.TypeBool,
              Computed:    true,
              Description: `Whether the Rule is active or suspended. Default is false.`,
            },
            "range_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "min_value": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The minimum column value allowed for a row to pass this validation.`,
                  },
                  "max_value": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The maximum column value allowed for a row to pass this validation.`,
                  },
                  "strict_min_enabled": {
                    Type:        schema.TypeBool,
                    Computed:    true,
                    Description: `Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed.`,
                  },
                  "strict_max_enabled": {
                    Type:        schema.TypeBool,
                    Computed:    true,
                    Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`,
                  },
                },
              },
              Description: `Row-level rule which evaluates whether each column value lies between a specified range.`,
            },
            "non_null_expectation": {
              Type:        schema.TypeList,
              Computed:    true,
              Description: `Row-level rule which evaluates whether each column value is null.`,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{},
              },
            },
            "set_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "values": {
                    Type:        schema.TypeList,
                    Computed:    true,
                    Elem:        &schema.Schema{Type: schema.TypeString},
                    Description: `Expected values for the column value.`,
                  },
                },
              },
              Description: `Row-level rule which evaluates whether each column value is contained by a specified set.`,
            },
            "regex_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "regex": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `A regular expression the column value is expected to match.`,
                  },
                },
              },

              Description: `Row-level rule which evaluates whether each column value matches a specified regex.`,
            },
            "uniqueness_expectation": {
              Type:        schema.TypeList,
              Computed:    true,
              Description: `Row-level rule which evaluates whether each column value is unique.`,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{},
              },
            },
            "statistic_range_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "statistic": {
                    Type:     schema.TypeString,
                    Computed: true,
                    Description: `The list of aggregate metrics a rule can be evaluated against.
Possible values: ["STATISTIC_UNDEFINED", "MEAN", "MIN", "MAX"]`,
                  },
                  "min_value": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The minimum column value allowed for a row to pass this validation.`,
                  },
                  "max_value": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The maximum column value allowed for a row to pass this validation.`,
                  },
                  "strict_min_enabled": {
                    Type:        schema.TypeBool,
                    Computed:    true,
                    Description: `Whether each value needs to be strictly greater than ('>') the minimum, or if equality is allowed.`,
                  },
                  "strict_max_enabled": {
                    Type:        schema.TypeBool,
                    Computed:    true,
                    Description: ` Whether each value needs to be strictly lesser than ('<') the maximum, or if equality is allowed.`,
                  },
                },
              },
              Description: `Aggregate rule which evaluates whether the column aggregate statistic lies between a specified range.`,
            },
            "row_condition_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "sql_expression": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The SQL expression.`,
                  },
                },
              },
              Description: `Row-level rule which evaluates whether each row in a table passes the specified condition.`,
            },
            "table_condition_expectation": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "sql_expression": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The SQL expression.`,
                  },
                },
              },
              Description: `Aggregate rule which evaluates whether the provided expression is true for a table.`,
            },
            "sql_assertion": {
              Type:     schema.TypeList,
              Computed: true,
              Elem: &schema.Resource{
                Schema: map[string]*schema.Schema{
                  "sql_statement": {
                    Type:        schema.TypeString,
                    Computed:    true,
                    Description: `The SQL expression.`,
                  },
                },
              },
              Description: `Aggregate rule which evaluates the number of rows returned for the provided statement. If any rows are returned, this rule fails.`,
            },
          },
        },
      },
    },
  }
}

func camelToSnake(s string) string {
  var result strings.Builder
  for i, ch := range s {
    if unicode.IsUpper(ch) {
      if i > 0 {
        result.WriteByte('_')
      }
      result.WriteRune(unicode.ToLower(ch))
    } else {
      result.WriteRune(ch)
    }
  }
  return result.String()
}
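
The camelToSnake helper above converts the camelCase keys returned by the Dataplex API into the snake_case attribute names used by the schema. A hypothetical test-style sketch (not part of this diff; it assumes it sits alongside the helper in the dataplex package) of the conversions it performs:

```go
package dataplex

import "testing"

// Hypothetical sketch (not part of this commit) illustrating how camelToSnake
// maps API response keys onto the snake_case schema attribute names above.
func TestCamelToSnakeExamples(t *testing.T) {
  cases := map[string]string{
    "ignoreNull":       "ignore_null",
    "rangeExpectation": "range_expectation",
    "strictMinEnabled": "strict_min_enabled",
    "sqlAssertion":     "sql_assertion",
  }
  for in, want := range cases {
    if got := camelToSnake(in); got != want {
      t.Errorf("camelToSnake(%q) = %q, want %q", in, got, want)
    }
  }
}
```
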

func flattenDataSourceDataplexDataQualityRulesExpectation(expectation interface{}) []interface{} {
  expectationsToSet := make(map[string]interface{})

  if expectation == nil {
    return []interface{}{expectationsToSet}
  }

  originalExpectation := expectation.(map[string]interface{})
  for k, v := range originalExpectation {
    snakeCaseKey := camelToSnake(k)
    expectationsToSet[snakeCaseKey] = v
  }
  return []interface{}{expectationsToSet}
}

func flattenDataSourceDataplexDataQualityRulesRules(rules interface{}) []interface{} {
  rulesToSet := make([]interface{}, 0)

  originalRules := rules.([]interface{})

  for _, rule := range originalRules {

    newRuleMap := make(map[string]interface{})
    ruleMap := rule.(map[string]interface{})

    for k, v := range ruleMap {
      snakeCaseKey := camelToSnake(k)
      if strings.HasSuffix(k, "Expectation") {
        // For expectation fields, need extra flatten
        newRuleMap[snakeCaseKey] = flattenDataSourceDataplexDataQualityRulesExpectation(v)
      } else {
        // For other fields (column, dimension, threshold, etc.), directly assign
        newRuleMap[snakeCaseKey] = v
      }
    }
    rulesToSet = append(rulesToSet, newRuleMap)
  }

  return rulesToSet
}
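
Together, the two flatten helpers above reshape each camelCase rule object from the API response into the form the schema expects: keys are converted to snake_case, and any field whose key ends in "Expectation" is itself flattened and wrapped in a single-element list. A hypothetical sketch (not part of this diff, again assuming the dataplex package) of that transformation for one rule:

```go
package dataplex

import (
  "reflect"
  "testing"
)

// Hypothetical sketch (not part of this commit): one camelCase rule object,
// shaped like an element of the API's "rule" list, flattened into the
// snake_case structure stored under the data source's "rules" attribute.
func TestFlattenDataQualityRulesSketch(t *testing.T) {
  apiRules := []interface{}{
    map[string]interface{}{
      "column":    "order_total",
      "dimension": "VALIDITY",
      "rangeExpectation": map[string]interface{}{
        "minValue": "0",
        "maxValue": "1000",
      },
    },
  }

  got := flattenDataSourceDataplexDataQualityRulesRules(apiRules)

  want := []interface{}{
    map[string]interface{}{
      "column":    "order_total",
      "dimension": "VALIDITY",
      // The expectation's keys become snake_case and the object is wrapped
      // in a single-element list.
      "range_expectation": []interface{}{
        map[string]interface{}{
          "min_value": "0",
          "max_value": "1000",
        },
      },
    },
  }

  if !reflect.DeepEqual(got, want) {
    t.Errorf("got %#v, want %#v", got, want)
  }
}
```
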

func dataSourceDataplexDataQualityRulesRead(d *schema.ResourceData, meta interface{}) error {
  config := meta.(*transport_tpg.Config)
  userAgent, err := tpgresource.GenerateUserAgentString(d, config.UserAgent)
  if err != nil {
    return err
  }

  project, err := tpgresource.GetProject(d, config)
  if err != nil {
    return err
  }

  location, err := tpgresource.GetLocation(d, config)
  if err != nil {
    return err
  }

  data_scan_id := d.Get("data_scan_id").(string)

  url, err := tpgresource.ReplaceVars(d, config, "{{DataplexBasePath}}projects/{{project}}/locations/{{location}}/dataScans/{{data_scan_id}}:generateDataQualityRules")
  if err != nil {
    return err
  }

  id := fmt.Sprintf("projects/%s/locations/%s/dataScans/%s", project, location, data_scan_id)
  d.SetId(id)

  res, err := transport_tpg.SendRequest(transport_tpg.SendRequestOptions{
    Config:               config,
    Method:               "POST",
    Project:              project,
    RawURL:               url,
    UserAgent:            userAgent,
    ErrorAbortPredicates: []transport_tpg.RetryErrorPredicateFunc{transport_tpg.Is429QuotaError},
  })

  if err != nil {
    return transport_tpg.HandleDataSourceNotFoundError(err, d, fmt.Sprintf("DataQualityRules %q", d.Id()), url)
  }

  if err := d.Set("rules", flattenDataSourceDataplexDataQualityRulesRules(res["rule"])); err != nil {
    return fmt.Errorf("Error setting rule: %s", err)
  }

  return nil
}
