Skip to content

Commit 5de0ca9

Browse files
authored
Add actions.save_findings.output_config.storage_path field to google_data_loss_prevention_job_trigger. (#15263)
1 parent 26321e1 commit 5de0ca9

File tree

2 files changed

+126
-2
lines changed

2 files changed

+126
-2
lines changed

mmv1/products/dlp/JobTrigger.yaml

Lines changed: 17 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -729,7 +729,7 @@ properties:
729729
send_empty_value: true
730730
allow_empty_object: true
731731
properties:
732-
# Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#SurrogateType
732+
# Meant to be an empty object with no properties - see here : https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#SurrogateType
733733
[]
734734
- name: 'storageConfig'
735735
type: NestedObject
@@ -1085,7 +1085,6 @@ properties:
10851085
type: NestedObject
10861086
description: |
10871087
Information on the location of the target BigQuery Table.
1088-
required: true
10891088
properties:
10901089
- name: 'projectId'
10911090
type: String
@@ -1102,6 +1101,22 @@ properties:
11021101
description: |
11031102
Name of the table. If it is not set, a new one will be generated for you with the following format:
11041103
`dlp_googleapis_yyyy_mm_dd_[dlp_job_id]`. Pacific timezone will be used for generating the date details.
1104+
- name: 'storagePath'
1105+
type: NestedObject
1106+
description: |
1107+
Store findings in an existing Cloud Storage bucket. Files will be generated with the job ID and file part number
1108+
as the filename, and will contain findings in textproto format as SaveToGcsFindingsOutput. The file name will use
1109+
the naming convention <job_id>-<shard_number>, for example: my-job-id-2.
1110+
1111+
Supported for InspectJobs. The bucket must not be the same as the bucket being inspected. If storing findings to
1112+
Cloud Storage, the output schema field should not be set. If set, it will be ignored.
1113+
properties:
1114+
- name: 'path'
1115+
type: String
1116+
description: |
1117+
A URL representing a file or path (no wildcards) in Cloud Storage.
1118+
Example: `gs://[BUCKET_NAME]/dictionary.txt`
1119+
required: true
11051120
- name: 'outputSchema'
11061121
type: Enum
11071122
description: |

mmv1/third_party/terraform/services/datalossprevention/resource_data_loss_prevention_job_trigger_test.go

Lines changed: 109 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -513,6 +513,41 @@ func TestAccDataLossPreventionJobTrigger_dlpJobTriggerCreateWithTimespanConfigBi
513513
})
514514
}
515515

516+
func TestAccDataLossPreventionJobTrigger_dlpJobTriggerSaveToCloudStorage(t *testing.T) {
517+
t.Parallel()
518+
519+
context := map[string]interface{}{
520+
"project": envvar.GetTestProjectFromEnv(),
521+
"random_suffix": acctest.RandString(t, 10),
522+
}
523+
524+
acctest.VcrTest(t, resource.TestCase{
525+
PreCheck: func() { acctest.AccTestPreCheck(t) },
526+
ProtoV5ProviderFactories: acctest.ProtoV5ProviderFactories(t),
527+
CheckDestroy: testAccCheckDataLossPreventionJobTriggerDestroyProducer(t),
528+
Steps: []resource.TestStep{
529+
{
530+
Config: testAccDataLossPreventionJobTrigger_inspectUpdateSaveToCloudStorage(context),
531+
},
532+
{
533+
ResourceName: "google_data_loss_prevention_job_trigger.basic",
534+
ImportState: true,
535+
ImportStateVerify: true,
536+
ImportStateVerifyIgnore: []string{"parent"},
537+
},
538+
{
539+
Config: testAccDataLossPreventionJobTrigger_inspectUpdateSaveToCloudStorageUpdate(context),
540+
},
541+
{
542+
ResourceName: "google_data_loss_prevention_job_trigger.basic",
543+
ImportState: true,
544+
ImportStateVerify: true,
545+
ImportStateVerifyIgnore: []string{"parent"},
546+
},
547+
},
548+
})
549+
}
550+
516551
func testAccDataLossPreventionJobTrigger_dlpJobTriggerBasic(context map[string]interface{}) string {
517552
return acctest.Nprintf(`
518553
resource "google_data_loss_prevention_job_trigger" "basic" {
@@ -2827,3 +2862,77 @@ resource "google_data_loss_prevention_job_trigger" "bigquery_row_limit_timespan"
28272862
}
28282863
`, context)
28292864
}
2865+
2866+
func testAccDataLossPreventionJobTrigger_inspectUpdateSaveToCloudStorage(context map[string]interface{}) string {
2867+
return acctest.Nprintf(`
2868+
resource "google_data_loss_prevention_job_trigger" "basic" {
2869+
parent = "projects/%{project}"
2870+
description = "Starting description"
2871+
display_name = "display"
2872+
2873+
triggers {
2874+
schedule {
2875+
recurrence_period_duration = "86400s"
2876+
}
2877+
}
2878+
2879+
inspect_job {
2880+
inspect_template_name = "fake"
2881+
actions {
2882+
save_findings {
2883+
output_config {
2884+
storage_path {
2885+
path = "gs://mybucket/save-path/"
2886+
}
2887+
}
2888+
}
2889+
}
2890+
storage_config {
2891+
cloud_storage_options {
2892+
file_set {
2893+
url = "gs://mybucket/directory/"
2894+
}
2895+
file_types = ["POWERPOINT", "EXCEL", "CSV", "TSV"]
2896+
}
2897+
}
2898+
}
2899+
}
2900+
`, context)
2901+
}
2902+
2903+
func testAccDataLossPreventionJobTrigger_inspectUpdateSaveToCloudStorageUpdate(context map[string]interface{}) string {
2904+
return acctest.Nprintf(`
2905+
resource "google_data_loss_prevention_job_trigger" "basic" {
2906+
parent = "projects/%{project}"
2907+
description = "Starting description"
2908+
display_name = "display"
2909+
2910+
triggers {
2911+
schedule {
2912+
recurrence_period_duration = "86400s"
2913+
}
2914+
}
2915+
2916+
inspect_job {
2917+
inspect_template_name = "fake"
2918+
actions {
2919+
save_findings {
2920+
output_config {
2921+
storage_path {
2922+
path = "gs://mybucket/save-path-updated/"
2923+
}
2924+
}
2925+
}
2926+
}
2927+
storage_config {
2928+
cloud_storage_options {
2929+
file_set {
2930+
url = "gs://mybucket/directory/"
2931+
}
2932+
file_types = ["POWERPOINT", "EXCEL", "CSV", "TSV"]
2933+
}
2934+
}
2935+
}
2936+
}
2937+
`, context)
2938+
}

0 commit comments

Comments
 (0)