@@ -16,49 +16,6 @@ locals {
1616 combined_sha = sha1 (" ${ local . shared_dir_sha } ${ local . id_sync_lambda_dir_sha } " )
1717}
1818
19- output "debug_build_paths" {
20- value = {
21- lambdas_dir = local.lambdas_dir
22- shared_dir = local.shared_dir
23- id_sync_lambda_dir = local.id_sync_lambda_dir
24- shared_files_count = length (local. shared_files )
25- id_sync_files_count = length (local. id_sync_lambda_files )
26- combined_sha = local.combined_sha
27- dockerfile_exists = fileexists (" ${ local . id_sync_lambda_dir } /Dockerfile" )
28- shared_common_exists = fileexists (" ${ local . shared_dir } /src/common/__init__.py" )
29- }
30- }
31-
32- # Debug: List some files from each directory
33- output "debug_file_listing" {
34- value = {
35- shared_files_sample = slice (local. shared_files , 0 , min (5 , length (local. shared_files )))
36- }
37- }
38-
39- resource "null_resource" "debug_build_context" {
40- provisioner "local-exec" {
41- command = <<- EOT
42- echo "SAW === HOST SYSTEM PATHS ==="
43- echo "Terraform execution directory: $(pwd)"
44- echo "Host build context: ${ local . lambdas_dir } "
45- echo "Host Dockerfile location: ${ local . id_sync_lambda_dir } /Dockerfile"
46- echo ""
47- echo "Docker build command that will be executed:"
48- echo "docker build -f id_sync/Dockerfile ${ local . lambdas_dir } "
49- echo ""
50- echo "=== HOST BUILD CONTEXT CONTENTS ==="
51- echo "What Docker can see from host:"
52- ls -la "${ local . lambdas_dir } /"
53- EOT
54- }
55- }
56-
57- # Reference the existing SQS queue
58- data "aws_sqs_queue" "existing_sqs_queue" {
59- name = " id_sync_test_queue"
60- }
61-
6219resource "aws_ecr_repository" "id_sync_lambda_repository" {
6320 image_scanning_configuration {
6421 scan_on_push = true
@@ -74,6 +31,7 @@ module "id_sync_docker_image" {
7431
7532 create_ecr_repo = false
7633 ecr_repo = aws_ecr_repository. id_sync_lambda_repository . name
34+ docker_file_path = " ../id_sync.Dockerfile"
7735 ecr_repo_lifecycle_policy = jsonencode ({
7836 " rules" : [
7937 {
@@ -93,40 +51,9 @@ module "id_sync_docker_image" {
9351
9452 platform = " linux/amd64"
9553 use_image_tag = false
96- source_path = local. lambdas_dir # parent lambdas directory
97- docker_file_path = " id_sync/Dockerfile" # Add this line
98- triggers = {
99- dir_sha = local.combined_sha # Changed to combined SHA
100- }
101- }
102-
103- # Add a local provisioner to debug build context
104- resource "null_resource" "debug_build_context2" {
54+ source_path = local. id_sync_lambda_dir
10555 triggers = {
106- dir_sha = local.combined_sha
107- }
108-
109- provisioner "local-exec" {
110- command = <<- EOT
111- echo "SAW === BUILD CONTEXT DEBUG ==="
112- echo "Build context: ${ local . lambdas_dir } "
113- echo "Dockerfile location: ${ local . id_sync_lambda_dir } /Dockerfile"
114- echo ""
115- echo "Checking Dockerfile exists:"
116- ls -la "${ local . id_sync_lambda_dir } /Dockerfile" || echo "Dockerfile NOT FOUND!"
117- echo ""
118- echo "Checking shared directory structure:"
119- ls -la "${ local . shared_dir } /src/common/" || echo "Shared common directory NOT FOUND!"
120- echo ""
121- echo "Files in build context (lambdas dir):"
122- ls -la "${ local . lambdas_dir } /"
123- echo ""
124- echo "Shared files structure:"
125- find "${ local . shared_dir } " -type f -name "*.py" | head -10
126- echo ""
127- echo "ID Sync files structure:"
128- find "${ local . id_sync_lambda_dir } " -type f -name "*.py" | head -10
129- EOT
56+ dir_sha = local.id_sync_lambda_dir_sha
13057 }
13158}
13259
@@ -176,6 +103,7 @@ resource "aws_iam_role" "id_sync_lambda_exec_role" {
176103 })
177104}
178105
106+ # Policy for Lambda execution role
179107resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
180108 name = " ${ local . short_prefix } -id-sync-lambda-exec-policy"
181109 policy = jsonencode ({
@@ -188,23 +116,21 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
188116 " logs:CreateLogStream" ,
189117 " logs:PutLogEvents"
190118 ]
191- Resource = " arn:aws:logs:${ var . aws_region } :${ local . immunisation_account_id } :log-group:/aws/lambda/${ local . short_prefix } -id_sync_lambda:*"
119+ Resource = " arn:aws:logs:${ var . aws_region } :${ var . immunisation_account_id } :log-group:/aws/lambda/${ local . short_prefix } -id_sync_lambda:*"
192120 },
193121 {
194122 Effect = " Allow"
195123 Action = [
196- " ecr:GetAuthorizationToken"
124+ " s3:GetObject" ,
125+ " s3:ListBucket" ,
126+ " s3:PutObject" ,
127+ " s3:CopyObject" ,
128+ " s3:DeleteObject"
197129 ]
198- Resource = " *"
199- },
200- {
201- Effect = " Allow"
202- Action = [
203- " ecr:BatchCheckLayerAvailability" ,
204- " ecr:GetDownloadUrlForLayer" ,
205- " ecr:BatchGetImage"
130+ Resource = [
131+ aws_s3_bucket.batch_data_source_bucket.arn,
132+ " ${ aws_s3_bucket . batch_data_source_bucket . arn } /*"
206133 ]
207- Resource = aws_ecr_repository.id_sync_lambda_repository.arn
208134 },
209135 {
210136 Effect = " Allow"
@@ -227,6 +153,18 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
227153 ],
228154 Resource = " *"
229155 },
156+ {
157+ Effect = " Allow"
158+ Action = [
159+ " s3:GetObject" ,
160+ " s3:PutObject" ,
161+ " s3:ListBucket"
162+ ]
163+ Resource = [
164+ local.config_bucket_arn,
165+ " ${ local . config_bucket_arn } /*"
166+ ]
167+ },
230168 {
231169 Effect : " Allow" ,
232170 Action : [
@@ -239,17 +177,28 @@ resource "aws_iam_policy" "id_sync_lambda_exec_policy" {
239177 Effect = " Allow"
240178 Action = " lambda:InvokeFunction"
241179 Resource = [
242- " arn:aws:lambda:${ var . aws_region } :${ local . immunisation_account_id } :function:imms-${ local . env } -id_sync_lambda" ,
180+ " arn:aws:lambda:${ var . aws_region } :${ var . immunisation_account_id } :function:imms-${ var . sub_environment } -id_sync_lambda" ,
243181 ]
244182 },
183+ # NEW
184+ # NB anomaly: do we want this in "id_sync_lambda_sqs_access_policy"?
245185 {
246- Effect = " Allow"
186+ Effect = " Allow" ,
247187 Action = [
248188 " sqs:ReceiveMessage" ,
249189 " sqs:DeleteMessage" ,
250190 " sqs:GetQueueAttributes"
251- ]
252- Resource = data.aws_sqs_queue.existing_sqs_queue.arn
191+ ],
192+ Resource = " arn:aws:sqs:eu-west-2:${ var . immunisation_account_id } :${ local . short_prefix } -id-sync-queue"
193+ },
194+ # NB anomaly: in redis_sync this appears in "redis_sync_lambda_kms_access_policy"
195+ {
196+ Effect = " Allow" ,
197+ Action = [
198+ " kms:Decrypt" ,
199+ " kms:GenerateDataKey"
200+ ],
201+ Resource = data.aws_kms_key.existing_id_sync_sqs_encryption_key.arn
253202 }
254203 ]
255204 })
@@ -268,6 +217,17 @@ resource "aws_iam_policy" "id_sync_lambda_kms_access_policy" {
268217 " kms:Decrypt"
269218 ]
270219 Resource = data.aws_kms_key.existing_lambda_encryption_key.arn
220+ },
221+ {
222+ Effect = " Allow"
223+ Action = [
224+ " kms:Encrypt" ,
225+ " kms:Decrypt" ,
226+ " kms:GenerateDataKey*"
227+ ]
228+ Resource = [
229+ data.aws_kms_key.existing_s3_encryption_key.arn,
230+ ]
271231 }
272232 ]
273233 })
@@ -285,6 +245,29 @@ resource "aws_iam_role_policy_attachment" "id_sync_lambda_kms_policy_attachment"
285245 policy_arn = aws_iam_policy. id_sync_lambda_kms_access_policy . arn
286246}
287247
# Combined IAM policy document for the id_sync Lambda's DynamoDB access,
# assembled from JSON policy templates on disk (local.policy_path).
data "aws_iam_policy_document" "id_sync_policy_document" {
  source_policy_documents = [
    # Table-level permissions, templated with the delta table's name.
    templatefile("${local.policy_path}/dynamodb.json", {
      "dynamodb_table_name" : aws_dynamodb_table.delta-dynamodb-table.name
    }),
    # Stream permissions, templated with the events (IEDS) table's name.
    templatefile("${local.policy_path}/dynamodb_stream.json", {
      "dynamodb_table_name" : aws_dynamodb_table.events-dynamodb-table.name
    })
  ]
}
258+
# Managed IAM policy wrapping the combined DynamoDB policy document
# (data.aws_iam_policy_document.id_sync_policy_document).
resource "aws_iam_policy" "id_sync_lambda_dynamodb_access_policy" {
  name        = "${local.short_prefix}-id-sync-lambda-dynamodb-access-policy"
  description = "Allow Lambda to access DynamoDB"
  policy      = data.aws_iam_policy_document.id_sync_policy_document.json
}
264+
# Attach the DynamoDB access policy to the id_sync Lambda execution role
# so the function can read/write the delta and events tables.
resource "aws_iam_role_policy_attachment" "id_sync_lambda_dynamodb_policy_attachment" {
  role       = aws_iam_role.id_sync_lambda_exec_role.name
  policy_arn = aws_iam_policy.id_sync_lambda_dynamodb_access_policy.arn
}
270+
288271# Lambda Function with Security Group and VPC.
289272resource "aws_lambda_function" "id_sync_lambda" {
290273 function_name = " ${ local . short_prefix } -id_sync_lambda"
@@ -301,11 +284,15 @@ resource "aws_lambda_function" "id_sync_lambda" {
301284
302285 environment {
303286 variables = {
304- ID_SYNC_PROC_LAMBDA_NAME = " imms-${ local . env } -id_sync_lambda"
287+ CONFIG_BUCKET_NAME = local.config_bucket_name
288+ REDIS_HOST = data.aws_elasticache_cluster.existing_redis.cache_nodes[0 ].address
289+ REDIS_PORT = data.aws_elasticache_cluster.existing_redis.cache_nodes[0 ].port
290+ ID_SYNC_PROC_LAMBDA_NAME = " imms-${ var . sub_environment } -id_sync_lambda"
291+ # NEW
292+ DELTA_TABLE_NAME = aws_dynamodb_table.delta- dynamodb- table.name
293+ IEDS_TABLE_NAME = aws_dynamodb_table.events- dynamodb- table.name
294+ PDS_ENV = var.pds_environment
305295 SPLUNK_FIREHOSE_NAME = module.splunk.firehose_stream_name
306- PDS_ENV = local.environment == " prod" ? " prod" : local.environment == " ref" ? " ref" : " int"
307- DELTA_TABLE_NAME = aws_dynamodb_table.delta- dynamodb- table.name
308- IEDS_TABLE_NAME = aws_dynamodb_table.events- dynamodb- table.name
309296 }
310297 }
311298 kms_key_arn = data. aws_kms_key . existing_lambda_encryption_key . arn
@@ -321,15 +308,17 @@ resource "aws_cloudwatch_log_group" "id_sync_log_group" {
321308 retention_in_days = 30
322309}
323310
324- # SQS Event Source Mapping for Lambda
325- resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
326- event_source_arn = data. aws_sqs_queue . existing_sqs_queue . arn
327- function_name = aws_lambda_function. id_sync_lambda . arn
# NOTE: config_lambda_notification and new_s3_invoke_permission were removed —
# they duplicated existing resources and are no longer required.
328312
# SQS event source mapping: invokes the id_sync Lambda for messages on the
# id-sync queue. The ARN is constructed by hand because the queue is managed
# outside this configuration, so there is no resource/data source to reference.
resource "aws_lambda_event_source_mapping" "id_sync_sqs_trigger" {
  # Use var.aws_region instead of a hard-coded "eu-west-2" for consistency
  # with the other constructed ARNs in this file.
  # NOTE(review): assumes var.aws_region is "eu-west-2" in all current
  # deployments — confirm before applying in other regions.
  event_source_arn = "arn:aws:sqs:${var.aws_region}:${var.immunisation_account_id}:${local.short_prefix}-id-sync-queue"
  function_name    = aws_lambda_function.id_sync_lambda.arn

  # Batch up to 10 messages, waiting at most 5 seconds to fill a batch.
  batch_size                         = 10
  maximum_batching_window_in_seconds = 5

  # Report per-record failures so only failed messages return to the queue.
  function_response_types = ["ReportBatchItemFailures"]
}
0 commit comments