Skip to content

Commit 75c5592

Browse files
Add two outputs for dlp job trigger: outputting to SCC and DataCatalog (#6855) (#5127)
Co-authored-by: Shuya Ma <[email protected]> Fixes hashicorp/terraform-provider-google#11861 Signed-off-by: Modular Magician <[email protected]> Signed-off-by: Modular Magician <[email protected]>
1 parent 8bcedb2 commit 75c5592

File tree

4 files changed

+219
-4
lines changed

4 files changed

+219
-4
lines changed

.changelog/6855.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
dlp: added `publish_findings_to_cloud_data_catalog` and `publish_summary_to_cscc` to `google_data_loss_prevention_job_trigger` resource
3+
```

google-beta/resource_data_loss_prevention_job_trigger.go

Lines changed: 85 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -118,10 +118,30 @@ A duration in seconds with up to nine fractional digits, terminated by 's'. Exam
118118
},
119119
ExactlyOneOf: []string{},
120120
},
121+
"publish_findings_to_cloud_data_catalog": {
122+
Type: schema.TypeList,
123+
Optional: true,
124+
Description: `Publish findings of a DlpJob to Data Catalog.`,
125+
MaxItems: 1,
126+
Elem: &schema.Resource{
127+
Schema: map[string]*schema.Schema{},
128+
},
129+
ExactlyOneOf: []string{},
130+
},
131+
"publish_summary_to_cscc": {
132+
Type: schema.TypeList,
133+
Optional: true,
134+
Description: `Publish the result summary of a DlpJob to the Cloud Security Command Center.`,
135+
MaxItems: 1,
136+
Elem: &schema.Resource{
137+
Schema: map[string]*schema.Schema{},
138+
},
139+
ExactlyOneOf: []string{},
140+
},
121141
"save_findings": {
122142
Type: schema.TypeList,
123143
Optional: true,
124-
Description: `Schedule for triggered jobs`,
144+
Description: `If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk`,
125145
MaxItems: 1,
126146
Elem: &schema.Resource{
127147
Schema: map[string]*schema.Schema{
@@ -1235,8 +1255,10 @@ func flattenDataLossPreventionJobTriggerInspectJobActions(v interface{}, d *sche
12351255
continue
12361256
}
12371257
transformed = append(transformed, map[string]interface{}{
1238-
"save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config),
1239-
"pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config),
1258+
"save_findings": flattenDataLossPreventionJobTriggerInspectJobActionsSaveFindings(original["saveFindings"], d, config),
1259+
"pub_sub": flattenDataLossPreventionJobTriggerInspectJobActionsPubSub(original["pubSub"], d, config),
1260+
"publish_summary_to_cscc": flattenDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(original["publishSummaryToCscc"], d, config),
1261+
"publish_findings_to_cloud_data_catalog": flattenDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(original["publishFindingsToCloudDataCatalog"], d, config),
12401262
})
12411263
}
12421264
return transformed
@@ -1319,6 +1341,22 @@ func flattenDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface
13191341
return v
13201342
}
13211343

1344+
func flattenDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(v interface{}, d *schema.ResourceData, config *Config) interface{} {
1345+
if v == nil {
1346+
return nil
1347+
}
1348+
transformed := make(map[string]interface{})
1349+
return []interface{}{transformed}
1350+
}
1351+
1352+
func flattenDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(v interface{}, d *schema.ResourceData, config *Config) interface{} {
1353+
if v == nil {
1354+
return nil
1355+
}
1356+
transformed := make(map[string]interface{})
1357+
return []interface{}{transformed}
1358+
}
1359+
13221360
func expandDataLossPreventionJobTriggerDescription(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
13231361
return v, nil
13241362
}
@@ -1914,6 +1952,20 @@ func expandDataLossPreventionJobTriggerInspectJobActions(v interface{}, d Terraf
19141952
transformed["pubSub"] = transformedPubSub
19151953
}
19161954

1955+
transformedPublishSummaryToCscc, err := expandDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(original["publish_summary_to_cscc"], d, config)
1956+
if err != nil {
1957+
return nil, err
1958+
} else {
1959+
transformed["publishSummaryToCscc"] = transformedPublishSummaryToCscc
1960+
}
1961+
1962+
transformedPublishFindingsToCloudDataCatalog, err := expandDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(original["publish_findings_to_cloud_data_catalog"], d, config)
1963+
if err != nil {
1964+
return nil, err
1965+
} else {
1966+
transformed["publishFindingsToCloudDataCatalog"] = transformedPublishFindingsToCloudDataCatalog
1967+
}
1968+
19171969
req = append(req, transformed)
19181970
}
19191971
return req, nil
@@ -2036,6 +2088,36 @@ func expandDataLossPreventionJobTriggerInspectJobActionsPubSubTopic(v interface{
20362088
return v, nil
20372089
}
20382090

2091+
func expandDataLossPreventionJobTriggerInspectJobActionsPublishSummaryToCscc(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
2092+
l := v.([]interface{})
2093+
if len(l) == 0 {
2094+
return nil, nil
2095+
}
2096+
2097+
if l[0] == nil {
2098+
transformed := make(map[string]interface{})
2099+
return transformed, nil
2100+
}
2101+
transformed := make(map[string]interface{})
2102+
2103+
return transformed, nil
2104+
}
2105+
2106+
func expandDataLossPreventionJobTriggerInspectJobActionsPublishFindingsToCloudDataCatalog(v interface{}, d TerraformResourceData, config *Config) (interface{}, error) {
2107+
l := v.([]interface{})
2108+
if len(l) == 0 {
2109+
return nil, nil
2110+
}
2111+
2112+
if l[0] == nil {
2113+
transformed := make(map[string]interface{})
2114+
return transformed, nil
2115+
}
2116+
transformed := make(map[string]interface{})
2117+
2118+
return transformed, nil
2119+
}
2120+
20392121
func resourceDataLossPreventionJobTriggerEncoder(d *schema.ResourceData, meta interface{}, obj map[string]interface{}) (map[string]interface{}, error) {
20402122
newObj := make(map[string]interface{})
20412123
newObj["jobTrigger"] = obj

google-beta/resource_data_loss_prevention_job_trigger_generated_test.go

Lines changed: 122 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -222,6 +222,128 @@ resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit_percentag
222222
`, context)
223223
}
224224

225+
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(t *testing.T) {
226+
t.Parallel()
227+
228+
context := map[string]interface{}{
229+
"project": getTestProjectFromEnv(),
230+
"random_suffix": randString(t, 10),
231+
}
232+
233+
vcrTest(t, resource.TestCase{
234+
PreCheck: func() { testAccPreCheck(t) },
235+
Providers: testAccProviders,
236+
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
237+
Steps: []resource.TestStep{
238+
{
239+
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(context),
240+
},
241+
{
242+
ResourceName: "google_data_loss_prevention_job_trigger.data_catalog_output",
243+
ImportState: true,
244+
ImportStateVerify: true,
245+
ImportStateVerifyIgnore: []string{"parent"},
246+
},
247+
},
248+
})
249+
}
250+
251+
// testAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample renders the
// HCL for a job trigger whose only action is publish_findings_to_cloud_data_catalog,
// interpolating %{project} and friends from context via Nprintf. The raw string below
// is runtime test input — its bytes must not be altered.
func testAccDataLossPreventionJobTrigger_dlpJobTriggerDataCatalogOutputExample(context map[string]interface{}) string {
252+
return Nprintf(`
253+
resource "google_data_loss_prevention_job_trigger" "data_catalog_output" {
254+
parent = "projects/%{project}"
255+
description = "Description"
256+
display_name = "Displayname"
257+
258+
triggers {
259+
schedule {
260+
recurrence_period_duration = "86400s"
261+
}
262+
}
263+
264+
inspect_job {
265+
inspect_template_name = "fake"
266+
actions {
267+
publish_findings_to_cloud_data_catalog {
268+
}
269+
}
270+
storage_config {
271+
big_query_options {
272+
table_reference {
273+
project_id = "project"
274+
dataset_id = "dataset"
275+
table_id = "table_to_scan"
276+
}
277+
rows_limit_percent = 50
278+
sample_method = "RANDOM_START"
279+
}
280+
}
281+
}
282+
}
283+
`, context)
284+
}
285+
286+
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(t *testing.T) {
287+
t.Parallel()
288+
289+
context := map[string]interface{}{
290+
"project": getTestProjectFromEnv(),
291+
"random_suffix": randString(t, 10),
292+
}
293+
294+
vcrTest(t, resource.TestCase{
295+
PreCheck: func() { testAccPreCheck(t) },
296+
Providers: testAccProviders,
297+
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
298+
Steps: []resource.TestStep{
299+
{
300+
Config: testAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(context),
301+
},
302+
{
303+
ResourceName: "google_data_loss_prevention_job_trigger.scc_output",
304+
ImportState: true,
305+
ImportStateVerify: true,
306+
ImportStateVerifyIgnore: []string{"parent"},
307+
},
308+
},
309+
})
310+
}
311+
312+
// testAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample renders the HCL for
// a job trigger whose only action is publish_summary_to_cscc, interpolating %{project}
// and friends from context via Nprintf. The raw string below is runtime test input —
// its bytes must not be altered.
func testAccDataLossPreventionJobTrigger_dlpJobTriggerSccOutputExample(context map[string]interface{}) string {
313+
return Nprintf(`
314+
resource "google_data_loss_prevention_job_trigger" "scc_output" {
315+
parent = "projects/%{project}"
316+
description = "Description"
317+
display_name = "Displayname"
318+
319+
triggers {
320+
schedule {
321+
recurrence_period_duration = "86400s"
322+
}
323+
}
324+
325+
inspect_job {
326+
inspect_template_name = "fake"
327+
actions {
328+
publish_summary_to_cscc {
329+
}
330+
}
331+
storage_config {
332+
big_query_options {
333+
table_reference {
334+
project_id = "project"
335+
dataset_id = "dataset"
336+
table_id = "table_to_scan"
337+
}
338+
rows_limit_percent = 50
339+
sample_method = "RANDOM_START"
340+
}
341+
}
342+
}
343+
}
344+
`, context)
345+
}
346+
225347
func testAccCheckDataLossPreventionJobTriggerDestroyProducer(t *testing.T) func(s *terraform.State) error {
226348
return func(s *terraform.State) error {
227349
for name, rs := range s.RootModule().Resources {

website/docs/r/data_loss_prevention_job_trigger.html.markdown

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -437,14 +437,22 @@ The following arguments are supported:
437437

438438
* `save_findings` -
439439
(Optional)
440-
Schedule for triggered jobs
440+
If set, the detailed findings will be persisted to the specified OutputStorageConfig. Only a single instance of this action can be specified. Compatible with: Inspect, Risk
441441
Structure is [documented below](#nested_save_findings).
442442

443443
* `pub_sub` -
444444
(Optional)
445445
Publish a message into a given Pub/Sub topic when the job completes.
446446
Structure is [documented below](#nested_pub_sub).
447447

448+
* `publish_summary_to_cscc` -
449+
(Optional)
450+
Publish the result summary of a DlpJob to the Cloud Security Command Center.
451+
452+
* `publish_findings_to_cloud_data_catalog` -
453+
(Optional)
454+
Publish findings of a DlpJob to Data Catalog.
455+
448456

449457
<a name="nested_save_findings"></a>The `save_findings` block supports:
450458

0 commit comments

Comments
 (0)