3 files changed (+45 −1)
@@ -2,6 +2,20 @@
 
 All notable changes to this project will be documented in this file.
 
+## [3.14.1](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/compare/v3.14.0...v3.14.1) (2025-01-08)
+
+
+### Bug Fixes
+
+* Cloudwatch policy for pipes ([#140](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/issues/140)) ([57626e4](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/commit/57626e4826fee0fa3969d706aa54285f24b81bdd))
+
+## [3.14.0](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/compare/v3.13.0...v3.14.0) (2025-01-07)
+
+
+### Features
+
+* Added dynamic block for managed_streaming_kafka_parameters ([#148](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/issues/148)) ([7621d52](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/commit/7621d52f7143c8f142bc2f8ab0093d6653e4907f))
+
 ## [3.13.0](https://github.com/terraform-aws-modules/terraform-aws-eventbridge/compare/v3.12.0...v3.13.0) (2024-11-27)
 
 
@@ -60,7 +60,16 @@ locals {
       matching_services = ["batch"]
     },
     logs = {
-      values = [v.target],
+      values = flatten([
+        "${v.target}:*",
+        [
+          for pipe in var.pipes : [
+            for log_config in try([pipe.log_configuration], []) : [
+              for cloudwatch_log in try([log_config.cloudwatch_logs_log_destination], []) : "${cloudwatch_log.log_group_arn}:*"
+            ]
+          ]
+        ]
+      ]),
       matching_services = ["logs"]
     },
     ecs = {
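
For context, a minimal sketch (not part of the diff) of a pipes entry whose CloudWatch Logs destination the new for-expression would collect. The module call, resource names, and ARNs below are illustrative assumptions; only the log_configuration -> cloudwatch_logs_log_destination -> log_group_arn nesting mirrors the keys referenced in the hunk above.

# Illustrative usage; names and ARNs are placeholders, not taken from this PR.
module "eventbridge" {
  source = "terraform-aws-modules/eventbridge/aws"

  pipes = {
    example = {
      source = aws_sqs_queue.source.arn # assumed source/target resources
      target = aws_sqs_queue.target.arn

      log_configuration = {
        level = "INFO"
        cloudwatch_logs_log_destination = {
          log_group_arn = aws_cloudwatch_log_group.pipe_logs.arn
        }
      }
    }
  }
}

With this fix, the generated logs policy lists "${v.target}:*" plus each pipe's log_group_arn suffixed with ":*", so the policy also covers the log-stream ARNs of every pipe's CloudWatch Logs destination rather than only the bare target ARN.
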
@@ -679,6 +679,27 @@ resource "aws_pipes_pipe" "this" {
         }
       }
 
+      dynamic "managed_streaming_kafka_parameters" {
+        for_each = try([source_parameters.value.managed_streaming_kafka_parameters], [])
+
+        content {
+          batch_size                         = try(managed_streaming_kafka_parameters.value.batch_size, null)
+          maximum_batching_window_in_seconds = try(managed_streaming_kafka_parameters.value.maximum_batching_window_in_seconds, null)
+          consumer_group_id                  = try(managed_streaming_kafka_parameters.value.consumer_group_id, null)
+          starting_position                  = try(managed_streaming_kafka_parameters.value.starting_position, null)
+          topic_name                         = try(managed_streaming_kafka_parameters.value.topic_name, null)
+
+          dynamic "credentials" {
+            for_each = try([managed_streaming_kafka_parameters.value.credentials], [])
+
+            content {
+              client_certificate_tls_auth = credentials.value.client_certificate_tls_auth
+              sasl_scram_512_auth         = credentials.value.sasl_scram_512_auth
+            }
+          }
+        }
+      }
+
       dynamic "kinesis_stream_parameters" {
         for_each = try([source_parameters.value.kinesis_stream_parameters], [])