Commit 0314093
Initial tf setup

1 parent cc6a7ef commit 0314093

5 files changed: +95 −25 lines

filenameprocessor/src/file_name_processor.py
Lines changed: 1 addition & 0 deletions

@@ -163,6 +163,7 @@ def lambda_handler(event: dict, context) -> None: # pylint: disable=unused-argu
 
     logger.info("Filename processor lambda task started")
     for record in event["Records"]:
+        print(record)
         handle_record(record)
 
     logger.info("Filename processor lambda task completed")
Lines changed: 41 additions & 0 deletions

@@ -0,0 +1,41 @@
+resource "aws_cloudwatch_event_rule" "batch_file_created" {
+  name        = "${local.short_prefix}-data-source-batch-file-created"
+  description = "Batch file was added to the data sources S3 bucket"
+
+  event_pattern = jsonencode({
+    source = [
+      "aws.s3"
+    ],
+    detail-type = [
+      "Object Created"
+    ],
+    detail = {
+      bucket = {
+        name = ["${aws_s3_bucket.batch_data_source_bucket.bucket}"]
+      },
+      object = {
+        key = [
+          {
+            anything-but = {
+              prefix = "archive/"
+            }
+          },
+          {
+            anything-but = {
+              prefix = "processing/"
+            }
+          }
+        ]
+      }
+    }
+  })
+}
+
+resource "aws_cloudwatch_event_target" "batch_file_created_sqs_queue" {
+  rule = aws_cloudwatch_event_rule.batch_file_created.name
+  arn  = aws_sqs_queue.batch_file_created_queue.arn
+
+  sqs_target {
+    message_group_id = "new_batch_file"
+  }
+}
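
One caveat on the key filter in this rule: EventBridge treats sibling entries in a pattern array as alternatives (OR), so two separate anything-but matchers accept any key that escapes at least one of the prefixes, which is every key. If the intent is to skip both archive/ and processing/, the usual form is a single anything-but matcher with a prefix list. A hedged sketch of that variant, not part of this commit, and assuming the deployed EventBridge version accepts prefix lists under anything-but:

# Sketch, not in this commit: one matcher excluding both prefixes.
# EventBridge ORs sibling entries in the "key" array, so the two separate
# anything-but filters above would together match every object key.
object = {
  key = [
    {
      anything-but = {
        prefix = ["archive/", "processing/"]
      }
    }
  ]
}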

terraform/file_name_processor.tf
Lines changed: 24 additions & 25 deletions

@@ -179,13 +179,24 @@ resource "aws_iam_policy" "filenameprocessor_lambda_sqs_policy" {
 
   policy = jsonencode({
     Version = "2012-10-17",
-    Statement = [{
-      Effect = "Allow",
-      Action = [
-        "sqs:SendMessage"
-      ],
-      Resource = aws_sqs_queue.supplier_fifo_queue.arn
-    }]
+    Statement = [
+      {
+        Effect = "Allow",
+        Action = [
+          "sqs:SendMessage"
+        ],
+        Resource = aws_sqs_queue.supplier_fifo_queue.arn
+      },
+      {
+        Effect = "Allow",
+        Action = [
+          "sqs:ReceiveMessage",
+          "sqs:DeleteMessage",
+          "sqs:GetQueueAttributes"
+        ],
+        Resource = aws_sqs_queue.batch_file_created_queue.arn
+      }
+    ]
   })
 }
 

@@ -266,6 +277,7 @@ resource "aws_iam_role_policy_attachment" "filenameprocessor_lambda_dynamo_acces
   role       = aws_iam_role.filenameprocessor_lambda_exec_role.name
   policy_arn = aws_iam_policy.filenameprocessor_dynamo_access_policy.arn
 }
+
 # Lambda Function with Security Group and VPC.
 resource "aws_lambda_function" "file_processor_lambda" {
   function_name = "${local.short_prefix}-filenameproc_lambda"

@@ -304,24 +316,11 @@ resource "aws_lambda_function" "file_processor_lambda" {
 
 }
 
-
-# Permission for S3 to invoke Lambda function
-resource "aws_lambda_permission" "s3_invoke_permission" {
-  statement_id  = "AllowExecutionFromS3"
-  action        = "lambda:InvokeFunction"
-  function_name = aws_lambda_function.file_processor_lambda.function_name
-  principal     = "s3.amazonaws.com"
-  source_arn    = aws_s3_bucket.batch_data_source_bucket.arn
-}
-
-# S3 Bucket notification to trigger Lambda function
-resource "aws_s3_bucket_notification" "datasources_lambda_notification" {
-  bucket = aws_s3_bucket.batch_data_source_bucket.bucket
-
-  lambda_function {
-    lambda_function_arn = aws_lambda_function.file_processor_lambda.arn
-    events              = ["s3:ObjectCreated:*"]
-  }
+resource "aws_lambda_event_source_mapping" "batch_file_created_sqs_to_lambda" {
+  event_source_arn = aws_sqs_queue.batch_file_created_queue.arn
+  function_name    = aws_lambda_function.file_processor_lambda.arn
+  batch_size       = 1
+  enabled          = true
 }
 
 resource "aws_cloudwatch_log_group" "file_name_processor_log_group" {

terraform/s3_config.tf
Lines changed: 5 additions & 0 deletions

@@ -3,6 +3,11 @@ resource "aws_s3_bucket" "batch_data_source_bucket" {
   force_destroy = local.is_temp
 }
 
+resource "aws_s3_bucket_notification" "datasources_file_created" {
+  bucket      = aws_s3_bucket.batch_data_source_bucket.id
+  eventbridge = true
+}
+
 resource "aws_s3_bucket_public_access_block" "batch_data_source_bucket_public_access_block" {
   bucket = aws_s3_bucket.batch_data_source_bucket.id
 
Lines changed: 24 additions & 0 deletions

@@ -0,0 +1,24 @@
+data "aws_iam_policy_document" "batch_file_created_queue_policy" {
+  statement {
+    effect = "Allow"
+
+    principals {
+      type        = "Service"
+      identifiers = ["events.amazonaws.com"]
+    }
+
+    actions = ["sqs:SendMessage"]
+    resources = [
+      aws_sqs_queue.batch_file_created_queue.arn
+    ]
+  }
+}
+
+# FIFO SQS Queue - targeted by EventBridge for new objects created in the data-sources S3 bucket
+resource "aws_sqs_queue" "batch_file_created_queue" {
+  name                        = "${local.short_prefix}-batch-file-created-queue.fifo"
+  policy                      = data.aws_iam_policy_document.batch_file_created_queue_policy.json
+  fifo_queue                  = true
+  content_based_deduplication = true # Optional, helps with deduplication
+  visibility_timeout_seconds  = 60
+}
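
One wrinkle worth flagging in this file: the queue's inline policy argument reads the policy document, while the document's resources list reads the queue's own ARN, so Terraform will typically reject the pair as a dependency cycle at plan time. A common way out, sketched here under that assumption and not part of this commit, is to attach the policy through a separate aws_sqs_queue_policy resource so the queue itself no longer depends on the document:

# Sketch, not in this commit: break the queue <-> policy-document cycle
# by attaching the policy separately instead of inline on the queue.
resource "aws_sqs_queue" "batch_file_created_queue" {
  name                        = "${local.short_prefix}-batch-file-created-queue.fifo"
  fifo_queue                  = true
  content_based_deduplication = true
  visibility_timeout_seconds  = 60
}

resource "aws_sqs_queue_policy" "batch_file_created_queue_policy" {
  queue_url = aws_sqs_queue.batch_file_created_queue.id
  policy    = data.aws_iam_policy_document.batch_file_created_queue_policy.json
}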
