
Commit 1ec69d3

Merge pull request #21 from NHSDigital/amb-1688-batch-processing-lambda
AMB-1688: Batch Processing Lambda
2 parents 156ac8f + b40eae0 commit 1ec69d3

6 files changed: 151 additions & 0 deletions

terraform/batch-processing.tf

Lines changed: 6 additions & 0 deletions
@@ -0,0 +1,6 @@
module "batch-processing" {
  source       = "./batch-processing"
  environment  = local.environment
  prefix       = local.prefix
  short_prefix = local.short_prefix
}

terraform/batch-processing/iam.tf

Lines changed: 80 additions & 0 deletions
@@ -0,0 +1,80 @@
resource "aws_iam_role" "batch_processing_lambda_role" {
  name               = "${var.short_prefix}-batch-processing-lambda-role"
  assume_role_policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": [
        "sts:AssumeRole"
      ],
      "Principal": {
        "Service": "lambda.amazonaws.com"
      },
      "Effect": "Allow",
      "Sid": ""
    }
  ]
}
EOF
}

# Note: CopyObject and HeadObject requests are authorised via s3:GetObject
# (source) and s3:PutObject (destination); there are no separate
# "s3:CopyObject" / "s3:HeadObject" IAM actions.
resource "aws_iam_policy" "batch_processing_lambda_policy" {
  name   = "${var.short_prefix}-batch-processing-lambda-policy"
  policy = <<EOF
{
  "Version": "2012-10-17",
  "Statement": [
    {
      "Action": [
        "s3:ListBucket",
        "s3:GetObject"
      ],
      "Effect": "Allow",
      "Resource": [
        "arn:aws:s3:::${var.prefix}-batch-lambda-source",
        "arn:aws:s3:::${var.prefix}-batch-lambda-source/*"
      ]
    },
    {
      "Action": [
        "s3:ListBucket",
        "s3:PutObject",
        "s3:PutObjectAcl"
      ],
      "Effect": "Allow",
      "Resource": [
        "arn:aws:s3:::${var.prefix}-batch-lambda-destination",
        "arn:aws:s3:::${var.prefix}-batch-lambda-destination/*"
      ]
    },
    {
      "Action": [
        "logs:CreateLogGroup",
        "logs:CreateLogStream",
        "logs:PutLogEvents"
      ],
      "Effect": "Allow",
      "Resource": "*"
    }
  ]
}
EOF
}

resource "aws_iam_role_policy_attachment" "batch_processing_lambda_role_to_policy" {
  role       = aws_iam_role.batch_processing_lambda_role.name
  policy_arn = aws_iam_policy.batch_processing_lambda_policy.arn
}

resource "aws_lambda_permission" "allow_source_bucket" {
  statement_id  = "AllowExecutionFromS3Bucket"
  action        = "lambda:InvokeFunction"
  function_name = aws_lambda_function.batch_processing_lambda.arn
  principal     = "s3.amazonaws.com"
  source_arn    = aws_s3_bucket.batch_lambda_source_bucket.arn
}
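
Since CopyObject and HeadObject are S3 API operations rather than IAM action names, here is a short sketch, not part of the commit, of the boto3 calls the policy above authorises; bucket and key names are placeholders:

# Sketch only: maps the S3 API calls the lambda might make to IAM actions.
import boto3

s3 = boto3.client("s3")

# HeadObject is authorised by s3:GetObject on the object.
s3.head_object(Bucket="example-batch-lambda-source", Key="input.csv")

# CopyObject is authorised by s3:GetObject on the source object plus
# s3:PutObject (and here s3:PutObjectAcl) on the destination.
s3.copy_object(
    CopySource={"Bucket": "example-batch-lambda-source", "Key": "input.csv"},
    Bucket="example-batch-lambda-destination",
    Key="input.csv",
)
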
Lines changed: 19 additions & 0 deletions
@@ -0,0 +1,19 @@
locals {
  lambda_file_name = "batch_processing.py"
}

data "archive_file" "lambda_zip" {
  type        = "zip"
  source_file = "${path.module}/src/${local.lambda_file_name}"
  output_path = "build/batch_processing_lambda.zip"
}

resource "aws_lambda_function" "batch_processing_lambda" {
  role             = aws_iam_role.batch_processing_lambda_role.arn
  timeout          = 300
  filename         = data.archive_file.lambda_zip.output_path
  function_name    = "${var.short_prefix}_batch_processing_lambda"
  handler          = "batch_processing.lambda_handler"
  runtime          = "python3.9"
  source_code_hash = data.archive_file.lambda_zip.output_base64sha256
}
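
The source_code_hash argument is what makes Terraform redeploy the function when batch_processing.py changes. A minimal sketch of the equivalent computation, assuming the zip has already been built at the output_path above:

# Sketch: reproduces archive_file's output_base64sha256 for the packaged zip.
import base64
import hashlib

with open("build/batch_processing_lambda.zip", "rb") as f:
    digest = hashlib.sha256(f.read()).digest()

# Matches data.archive_file.lambda_zip.output_base64sha256
print(base64.b64encode(digest).decode())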

terraform/batch-processing/s3.tf

Lines changed: 23 additions & 0 deletions
@@ -0,0 +1,23 @@
locals {
  # Flag so we can force-destroy S3 buckets that still contain objects, but
  # only for PR and short-code environments (names like "pr-42" or "amb-1688").
  is_temp = length(regexall("[a-z]{2,4}-?[0-9]+", var.environment)) > 0
}

resource "aws_s3_bucket" "batch_lambda_source_bucket" {
  bucket        = "${var.prefix}-batch-lambda-source"
  force_destroy = local.is_temp
}

resource "aws_s3_bucket" "batch_lambda_destination_bucket" {
  bucket        = "${var.prefix}-batch-lambda-destination"
  force_destroy = local.is_temp
}

resource "aws_s3_bucket_notification" "batch_processing_source_lambda_trigger" {
  bucket = aws_s3_bucket.batch_lambda_source_bucket.id

  lambda_function {
    lambda_function_arn = aws_lambda_function.batch_processing_lambda.arn
    events              = ["s3:ObjectCreated:*"]
  }

  depends_on = [aws_lambda_permission.allow_source_bucket]
}
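
For a quick look at which environment names the is_temp flag treats as temporary, a small sketch; it assumes Python's re handles this pattern the same way Terraform's RE2-based regexall does, and the environment names are made up:

# Sketch: which environment names count as temporary under the is_temp regex.
import re

pattern = re.compile(r"[a-z]{2,4}-?[0-9]+")
for env in ["pr-42", "amb-1688", "internal-dev", "prod"]:
    print(env, bool(pattern.search(env)))
# pr-42 True, amb-1688 True, internal-dev False, prod False
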
terraform/batch-processing/src/batch_processing.py

Lines changed: 20 additions & 0 deletions
@@ -0,0 +1,20 @@
from time import time

from boto3 import resource


def lambda_handler(event, _context):
    # The S3 notification tells us which bucket received the new object.
    source_bucket_name = event["Records"][0]["s3"]["bucket"]["name"]
    dest_bucket_name = source_bucket_name.replace("source", "destination")
    output_bucket = resource('s3').Bucket(dest_bucket_name)

    # Write some placeholder bytestring data to a file in the bucket,
    # so we can test that the lambda writes to the correct output bucket.
    filename = f"output_report_{time()}.txt"
    data = (b'Test file to see if the lambda writes to the correct s3 bucket. '
            b'If our AWS bill skyrockets, this file has been written to the wrong bucket!')

    output_bucket.put_object(Body=data, Key=filename)

    return {
        'statusCode': 200
    }
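
A minimal local test sketch for the handler above, not part of the commit: it assumes the moto library (v5+) is available to fake S3, and the bucket names are illustrative.

# Sketch: exercise lambda_handler against moto's in-memory S3.
import os

# Fake credentials/region so boto3 never touches a real AWS account.
os.environ.setdefault("AWS_ACCESS_KEY_ID", "testing")
os.environ.setdefault("AWS_SECRET_ACCESS_KEY", "testing")
os.environ.setdefault("AWS_DEFAULT_REGION", "us-east-1")

import boto3
from moto import mock_aws

from batch_processing import lambda_handler


@mock_aws
def test_writes_report_to_destination_bucket():
    s3 = boto3.resource("s3")
    s3.create_bucket(Bucket="example-batch-lambda-source")
    s3.create_bucket(Bucket="example-batch-lambda-destination")

    # Only the fields the handler actually reads from an S3 notification.
    event = {"Records": [{"s3": {"bucket": {"name": "example-batch-lambda-source"}}}]}
    assert lambda_handler(event, None) == {"statusCode": 200}

    keys = [o.key for o in s3.Bucket("example-batch-lambda-destination").objects.all()]
    assert len(keys) == 1 and keys[0].startswith("output_report_")
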
Lines changed: 3 additions & 0 deletions
@@ -0,0 +1,3 @@
variable "prefix" {}
variable "short_prefix" {}
variable "environment" {}
