
Commit 50a09a4

id_sync_lambda.tf from #686
1 parent: ca242f0

File tree

terraform/id_sync_lambda.tf
terraform/sqs_id_sync.tf

2 files changed: +68, -112 lines

terraform/id_sync_lambda.tf

Lines changed: 66 additions & 111 deletions
@@ -1,62 +1,8 @@
 # Define the directory containing the Docker image and calculate its SHA-256 hash for triggering redeployments
 locals {
-  lambdas_dir        = abspath("${path.root}/../lambdas")
-  shared_dir         = abspath("${path.root}/../lambdas/shared")
-  id_sync_lambda_dir = abspath("${path.root}/../lambdas/id_sync")
-
-  # Get files from both directories
-  shared_files         = fileset(local.shared_dir, "**")
+  id_sync_lambda_dir   = abspath("${path.root}/../redis_sync")
   id_sync_lambda_files = fileset(local.id_sync_lambda_dir, "**")
-
-  # Calculate SHA for both directories
-  shared_dir_sha         = sha1(join("", [for f in local.shared_files : filesha1("${local.shared_dir}/${f}")]))
   id_sync_lambda_dir_sha = sha1(join("", [for f in local.id_sync_lambda_files : filesha1("${local.id_sync_lambda_dir}/${f}")]))
-
-  # Combined SHA to trigger rebuild when either directory changes
-  combined_sha = sha1("${local.shared_dir_sha}${local.id_sync_lambda_dir_sha}")
-}
-
-output "debug_build_paths" {
-  value = {
-    lambdas_dir          = local.lambdas_dir
-    shared_dir           = local.shared_dir
-    id_sync_lambda_dir   = local.id_sync_lambda_dir
-    shared_files_count   = length(local.shared_files)
-    id_sync_files_count  = length(local.id_sync_lambda_files)
-    combined_sha         = local.combined_sha
-    dockerfile_exists    = fileexists("${local.id_sync_lambda_dir}/Dockerfile")
-    shared_common_exists = fileexists("${local.shared_dir}/src/common/__init__.py")
-  }
-}
-
-# Debug: List some files from each directory
-output "debug_file_listing" {
-  value = {
-    shared_files_sample = slice(local.shared_files, 0, min(5, length(local.shared_files)))
-  }
-}
-
-resource "null_resource" "debug_build_context" {
-  provisioner "local-exec" {
-    command = <<-EOT
-      echo "SAW === HOST SYSTEM PATHS ==="
-      echo "Terraform execution directory: $(pwd)"
-      echo "Host build context: ${local.lambdas_dir}"
-      echo "Host Dockerfile location: ${local.id_sync_lambda_dir}/Dockerfile"
-      echo ""
-      echo "Docker build command that will be executed:"
-      echo "docker build -f id_sync/Dockerfile ${local.lambdas_dir}"
-      echo ""
-      echo "=== HOST BUILD CONTEXT CONTENTS ==="
-      echo "What Docker can see from host:"
-      ls -la "${local.lambdas_dir}/"
-    EOT
-  }
-}
-
-# Reference the existing SQS queue
-data "aws_sqs_queue" "existing_sqs_queue" {
-  name = "id_sync_test_queue"
 }
 
 resource "aws_ecr_repository" "id_sync_lambda_repository" {
@@ -93,40 +39,9 @@ module "id_sync_docker_image" {
 
   platform      = "linux/amd64"
   use_image_tag = false
-  source_path      = local.lambdas_dir    # parent lambdas directory
-  docker_file_path = "id_sync/Dockerfile" # Add this line
-  triggers = {
-    dir_sha = local.combined_sha # Changed to combined SHA
-  }
-}
-
-# Add a local provisioner to debug build context
-resource "null_resource" "debug_build_context2" {
+  source_path = local.id_sync_lambda_dir
   triggers = {
-    dir_sha = local.combined_sha
-  }
-
-  provisioner "local-exec" {
-    command = <<-EOT
-      echo "SAW === BUILD CONTEXT DEBUG ==="
-      echo "Build context: ${local.lambdas_dir}"
-      echo "Dockerfile location: ${local.id_sync_lambda_dir}/Dockerfile"
-      echo ""
-      echo "Checking Dockerfile exists:"
-      ls -la "${local.id_sync_lambda_dir}/Dockerfile" || echo "Dockerfile NOT FOUND!"
-      echo ""
-      echo "Checking shared directory structure:"
-      ls -la "${local.shared_dir}/src/common/" || echo "Shared common directory NOT FOUND!"
-      echo ""
-      echo "Files in build context (lambdas dir):"
-      ls -la "${local.lambdas_dir}/"
-      echo ""
-      echo "Shared files structure:"
-      find "${local.shared_dir}" -type f -name "*.py" | head -10
-      echo ""
-      echo "ID Sync files structure:"
-      find "${local.id_sync_lambda_dir}" -type f -name "*.py" | head -10
-    EOT
+    dir_sha = local.id_sync_lambda_dir_sha
   }
 }
 
@@ -176,6 +91,7 @@ resource "aws_iam_role" "id_sync_lambda_exec_role" {
   })
 }
 
+# Policy for Lambda execution role
 resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
   name   = "${local.short_prefix}-id-sync-lambda-exec-policy"
   policy = jsonencode({
@@ -188,23 +104,21 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
           "logs:CreateLogStream",
           "logs:PutLogEvents"
         ]
-        Resource = "arn:aws:logs:${var.aws_region}:${local.immunisation_account_id}:log-group:/aws/lambda/${local.short_prefix}-id_sync_lambda:*"
+        Resource = "arn:aws:logs:${var.aws_region}:${var.immunisation_account_id}:log-group:/aws/lambda/${local.short_prefix}-id_sync_lambda:*"
       },
       {
         Effect = "Allow"
         Action = [
-          "ecr:GetAuthorizationToken"
+          "s3:GetObject",
+          "s3:ListBucket",
+          "s3:PutObject",
+          "s3:CopyObject",
+          "s3:DeleteObject"
         ]
-        Resource = "*"
-      },
-      {
-        Effect = "Allow"
-        Action = [
-          "ecr:BatchCheckLayerAvailability",
-          "ecr:GetDownloadUrlForLayer",
-          "ecr:BatchGetImage"
+        Resource = [
+          aws_s3_bucket.batch_data_source_bucket.arn,
+          "${aws_s3_bucket.batch_data_source_bucket.arn}/*"
         ]
-        Resource = aws_ecr_repository.id_sync_lambda_repository.arn
       },
       {
         Effect = "Allow"
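One caveat on the new S3 statement: "s3:CopyObject" is not an IAM action (copies are authorised via s3:GetObject on the source and s3:PutObject on the destination), so that entry will never match anything. A trimmed sketch of the same statement object, assuming no further actions are needed:

{
  Effect = "Allow"
  Action = [
    "s3:GetObject",
    "s3:ListBucket",
    "s3:PutObject",
    "s3:DeleteObject"
  ]
  Resource = [
    aws_s3_bucket.batch_data_source_bucket.arn,
    "${aws_s3_bucket.batch_data_source_bucket.arn}/*"
  ]
},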
@@ -227,6 +141,18 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
         ],
         Resource = "*"
       },
+      {
+        Effect = "Allow"
+        Action = [
+          "s3:GetObject",
+          "s3:PutObject",
+          "s3:ListBucket"
+        ]
+        Resource = [
+          local.config_bucket_arn,
+          "${local.config_bucket_arn}/*"
+        ]
+      },
       {
         Effect : "Allow",
         Action : [
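The config-bucket statement assumes local.config_bucket_arn (and local.config_bucket_name, used in the environment block further down) is defined elsewhere in the module. If it is not, a hypothetical shape, with the bucket name purely illustrative:

locals {
  config_bucket_name = "${local.short_prefix}-config" # hypothetical name
  config_bucket_arn  = "arn:aws:s3:::${local.config_bucket_name}"
}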
@@ -239,17 +165,28 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
         Effect = "Allow"
         Action = "lambda:InvokeFunction"
         Resource = [
-          "arn:aws:lambda:${var.aws_region}:${local.immunisation_account_id}:function:imms-${local.env}-id_sync_lambda",
+          "arn:aws:lambda:${var.aws_region}:${var.immunisation_account_id}:function:imms-${var.sub_environment}-id_sync_lambda",
         ]
       },
+      # NEW
+      # NB anomaly: do we want this in "id_sync_lambda_sqs_access_policy"?
       {
-        Effect = "Allow"
+        Effect = "Allow",
         Action = [
           "sqs:ReceiveMessage",
           "sqs:DeleteMessage",
           "sqs:GetQueueAttributes"
-        ]
-        Resource = data.aws_sqs_queue.existing_sqs_queue.arn
+        ],
+        Resource = "arn:aws:sqs:eu-west-2:${var.immunisation_account_id}:${local.short_prefix}-id-sync-queue"
+      },
+      # NB anomaly: in redis_sync this appears in "redis_sync_lambda_kms_access_policy"
+      {
+        Effect = "Allow",
+        Action = [
+          "kms:Decrypt",
+          "kms:GenerateDataKey"
+        ],
+        Resource = data.aws_kms_key.existing_id_sync_sqs_encryption_key.arn
       }
     ]
   })
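On the first "NB anomaly" comment: if the SQS statement is meant to live in its own policy, a minimal sketch of that split (the policy name follows the comment; the role attachment is assumed to mirror the existing exec-policy attachment, and the queue is referenced from terraform/sqs_id_sync.tf rather than by a hand-built ARN):

resource "aws_iam_policy" "id_sync_lambda_sqs_access_policy" {
  name = "${local.short_prefix}-id-sync-lambda-sqs-access-policy"
  policy = jsonencode({
    Version = "2012-10-17"
    Statement = [
      {
        Effect = "Allow",
        Action = [
          "sqs:ReceiveMessage",
          "sqs:DeleteMessage",
          "sqs:GetQueueAttributes"
        ],
        # Resource reference to the queue declared in terraform/sqs_id_sync.tf
        Resource = aws_sqs_queue.id_sync_queue.arn
      }
    ]
  })
}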
@@ -268,6 +205,17 @@ resource "aws_iam_policy" "id_sync_lambda_kms_access_policy" {
           "kms:Decrypt"
         ]
         Resource = data.aws_kms_key.existing_lambda_encryption_key.arn
+      },
+      {
+        Effect = "Allow"
+        Action = [
+          "kms:Encrypt",
+          "kms:Decrypt",
+          "kms:GenerateDataKey*"
+        ]
+        Resource = [
+          data.aws_kms_key.existing_s3_encryption_key.arn,
+        ]
       }
     ]
   })
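The added statement references data.aws_kms_key.existing_s3_encryption_key, which this diff assumes is declared elsewhere in the module. If it is not, a one-line lookup would do (the alias is an assumption):

data "aws_kms_key" "existing_s3_encryption_key" {
  key_id = "alias/imms-s3-encryption-key" # hypothetical alias
}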
@@ -301,9 +249,14 @@ resource "aws_lambda_function" "id_sync_lambda" {
 
   environment {
     variables = {
-      ID_SYNC_PROC_LAMBDA_NAME = "imms-${local.env}-id_sync_lambda"
+      CONFIG_BUCKET_NAME       = local.config_bucket_name
+      REDIS_HOST               = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].address
+      REDIS_PORT               = data.aws_elasticache_cluster.existing_redis.cache_nodes[0].port
+      ID_SYNC_PROC_LAMBDA_NAME = "imms-${var.sub_environment}-id_sync_lambda"
+      # NEW
+      DELTA_TABLE_NAME         = aws_dynamodb_table.delta-dynamodb-table.name
+      PDS_ENV                  = var.pds_environment
       SPLUNK_FIREHOSE_NAME     = module.splunk.firehose_stream_name
-      PDS_ENV                  = local.environment == "prod" ? "prod" : local.environment == "ref" ? "ref" : "int"
     }
   }
   kms_key_arn = data.aws_kms_key.existing_lambda_encryption_key.arn
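REDIS_HOST and REDIS_PORT are read from an aws_elasticache_cluster data source that this diff assumes exists elsewhere in the module; for reference, a minimal declaration (the cluster_id is illustrative):

data "aws_elasticache_cluster" "existing_redis" {
  cluster_id = "${local.short_prefix}-redis" # hypothetical cluster id
}

cache_nodes[0].address and cache_nodes[0].port then resolve to the first node's endpoint, as used in the environment block above.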
@@ -319,15 +272,17 @@ resource "aws_cloudwatch_log_group" "id_sync_log_group" {
   retention_in_days = 30
 }
 
-# SQS Event Source Mapping for Lambda
-resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
-  event_source_arn = data.aws_sqs_queue.existing_sqs_queue.arn
-  function_name    = aws_lambda_function.id_sync_lambda.arn
+# delete config_lambda_notification / new_s3_invoke_permission - not required; duplicate
 
+# NEW
+resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
+  event_source_arn = "arn:aws:sqs:eu-west-2:${var.immunisation_account_id}:${local.short_prefix}-id-sync-queue"
+  function_name    = aws_lambda_function.id_sync_lambda.arn # TODO
+
   # Optional: Configure batch size and other settings
   batch_size                         = 10
   maximum_batching_window_in_seconds = 5
-
+
   # Optional: Configure error handling
   function_response_types = ["ReportBatchItemFailures"]
-}
+}
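A possible follow-up for the hard-coded queue ARN and the # TODO on function_name: since terraform/sqs_id_sync.tf (below) declares the queue in what appears to be the same root module, the mapping could reference the resource directly — a sketch:

resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
  event_source_arn = aws_sqs_queue.id_sync_queue.arn # resource reference instead of a hand-built ARN
  function_name    = aws_lambda_function.id_sync_lambda.arn

  batch_size                         = 10
  maximum_batching_window_in_seconds = 5
  function_response_types            = ["ReportBatchItemFailures"]
}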

terraform/sqs_id_sync.tf

Lines changed: 2 additions & 1 deletion
@@ -1,7 +1,8 @@
 resource "aws_sqs_queue" "id_sync_queue" {
   name              = "${local.short_prefix}-id-sync-queue"
   kms_master_key_id = data.aws_kms_key.existing_id_sync_sqs_encryption_key.arn
-  visibility_timeout_seconds = 60
+  # TODO: visibility_timeout_seconds must not be less than aws_lambda_function.id_sync_lambda_timeout
+  visibility_timeout_seconds = 360
   redrive_policy = jsonencode({
     deadLetterTargetArn = aws_sqs_queue.id_sync_dlq.arn
     maxReceiveCount     = 4
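On the TODO above: AWS's guidance for queues used as Lambda event sources is a visibility timeout of at least six times the function timeout, so one option is to derive the value instead of hard-coding 360 — a sketch, assuming the function timeout is lifted into a shared local (the local and its value are illustrative):

locals {
  # Assumed to match the timeout argument on aws_lambda_function.id_sync_lambda.
  id_sync_lambda_timeout = 60
}

resource "aws_sqs_queue" "id_sync_queue" {
  name              = "${local.short_prefix}-id-sync-queue"
  kms_master_key_id = data.aws_kms_key.existing_id_sync_sqs_encryption_key.arn

  # Never less than the function timeout; 6x is the AWS-recommended floor
  # for Lambda event source queues.
  visibility_timeout_seconds = 6 * local.id_sync_lambda_timeout

  redrive_policy = jsonencode({
    deadLetterTargetArn = aws_sqs_queue.id_sync_dlq.arn
    maxReceiveCount     = 4
  })
}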
