Skip to content

Commit 41e3cff

Browse files
Add enable_streaming_engine argument to google_dataflow_job (#4585) (#3049)
* Add enable_streaming_engine argument to google_dataflow_job. This should address hashicorp/terraform-provider-google#8649. * Address PR feedback. Signed-off-by: Modular Magician <[email protected]>
1 parent 384384c commit 41e3cff

File tree

4 files changed

+13
-1
lines changed

4 files changed

+13
-1
lines changed

.changelog/4585.txt

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
```release-note:enhancement
2+
dataflow: added `enable_streaming_engine` argument
3+
```

google-beta/resource_dataflow_flex_template_job_test.go

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,7 +8,7 @@ import (
88

99
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
1010
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
11-
"google.golang.org/api/compute/v1"
11+
compute "google.golang.org/api/compute/v1"
1212
)
1313

1414
func TestAccDataflowFlexTemplateJob_basic(t *testing.T) {

google-beta/resource_dataflow_job.go

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -198,6 +198,12 @@ func resourceDataflowJob() *schema.Resource {
198198
Computed: true,
199199
Description: `The unique ID of this job.`,
200200
},
201+
202+
"enable_streaming_engine": {
203+
Type: schema.TypeBool,
204+
Optional: true,
205+
Description: `Indicates if the job should use the streaming engine feature.`,
206+
},
201207
},
202208
UseJSONNumber: true,
203209
}
@@ -540,6 +546,7 @@ func resourceDataflowJobSetupEnv(d *schema.ResourceData, config *Config) (datafl
540546
MachineType: d.Get("machine_type").(string),
541547
KmsKeyName: d.Get("kms_key_name").(string),
542548
IpConfiguration: d.Get("ip_configuration").(string),
549+
EnableStreamingEngine: d.Get("enable_streaming_engine").(bool),
543550
AdditionalUserLabels: labels,
544551
Zone: zone,
545552
AdditionalExperiments: additionalExperiments,

website/docs/r/dataflow_job.html.markdown

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,6 +43,7 @@ resource "google_dataflow_job" "pubsub_stream" {
4343
name = "tf-test-dataflow-job1"
4444
template_gcs_path = "gs://my-bucket/templates/template_file"
4545
temp_gcs_location = "gs://my-bucket/tmp_dir"
46+
enable_streaming_engine = true
4647
parameters = {
4748
inputFilePattern = "${google_storage_bucket.bucket1.url}/*.json"
4849
outputTopic = google_pubsub_topic.topic.id
@@ -90,6 +91,7 @@ The following arguments are supported:
9091
* `kms_key_name` - (Optional) The name for the Cloud KMS key for the job. Key format is: `projects/PROJECT_ID/locations/LOCATION/keyRings/KEY_RING/cryptoKeys/KEY`
9192
* `ip_configuration` - (Optional) The configuration for VM IPs. Options are `"WORKER_IP_PUBLIC"` or `"WORKER_IP_PRIVATE"`.
9293
* `additional_experiments` - (Optional) List of experiments that should be used by the job. An example value is `["enable_stackdriver_agent_metrics"]`.
94+
* `enable_streaming_engine` - (Optional) Enable/disable the use of [Streaming Engine](https://cloud.google.com/dataflow/docs/guides/deploying-a-pipeline#streaming-engine) for the job. Note that Streaming Engine is enabled by default for pipelines developed against the Beam SDK for Python v2.21.0 or later when using Python 3.
9395

9496
## Attributes Reference
9597

0 commit comments

Comments (0)