from unittest import TestCase
from unittest.mock import patch

-from moto import mock_dynamodb, mock_sqs
+import botocore
+from moto import mock_dynamodb, mock_sqs, mock_s3

from batch_file_created_event import BatchFileCreatedEvent
from exceptions import InvalidBatchSizeError, EventAlreadyProcessingForSupplierAndVaccTypeError

sqs_client = boto3.client("sqs", region_name=REGION_NAME)
dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME)
+s3_client = boto3.client("s3", region_name=REGION_NAME)

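# moto's mock decorators patch boto3/botocore so that every AWS call made by
# these tests is handled by an in-memory fake rather than a real AWS endpoint.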
@mock_dynamodb
@mock_sqs
+@mock_s3
class TestLambdaHandler(TestCase):
    default_batch_file_event: BatchFileCreatedEvent = BatchFileCreatedEvent(
        message_id="df0b745c-b8cb-492c-ba84-8ea28d9f51d5",
@@ -32,6 +35,8 @@ class TestLambdaHandler(TestCase):
        created_at_formatted_string="20250826T14372600"
    )
    mock_queue_url = MOCK_ENVIRONMENT_DICT.get("QUEUE_URL")
+    mock_source_bucket = MOCK_ENVIRONMENT_DICT.get("SOURCE_BUCKET_NAME")
+    mock_ack_bucket = MOCK_ENVIRONMENT_DICT.get("ACK_BUCKET_NAME")

    def setUp(self):
        dynamodb_client.create_table(
@@ -66,6 +71,12 @@ def setUp(self):
6671 "FifoQueue" : "true" ,
6772 "ContentBasedDeduplication" : "true"
6873 })
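        # Note: the FIFO attributes above require the queue name to end in ".fifo";
        # ContentBasedDeduplication makes SQS derive the dedup ID from a SHA-256
        # hash of the message body, so no explicit MessageDeduplicationId is needed.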
+
+        for bucket_name in [self.mock_source_bucket, self.mock_ack_bucket]:
+            s3_client.create_bucket(
+                Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": REGION_NAME}
+            )
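+            # CreateBucketConfiguration with a LocationConstraint is required for
+            # any region other than us-east-1 (this assumes REGION_NAME is not
+            # us-east-1; moto enforces the same rule as real S3).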
+
        self.logger_patcher = patch("batch_processor_filter_service.logger")
        self.mock_logger = self.logger_patcher.start()
        self.firehose_log_patcher = patch("batch_processor_filter_service.send_log_to_firehose")
@@ -74,9 +85,31 @@ def setUp(self):
    def tearDown(self):
        dynamodb_client.delete_table(TableName=AUDIT_TABLE_NAME)
        sqs_client.delete_queue(QueueUrl=self.mock_queue_url)
+
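+        # list_objects_v2 returns at most 1,000 keys per call, which is more than
+        # enough for these tests; larger buckets would need a paginator.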
+        for bucket_name in [self.mock_source_bucket, self.mock_ack_bucket]:
+            for obj in s3_client.list_objects_v2(Bucket=bucket_name).get("Contents", []):
+                # Must delete objects before bucket can be deleted
+                s3_client.delete_object(Bucket=bucket_name, Key=obj["Key"])
+            s3_client.delete_bucket(Bucket=bucket_name)
+
        self.logger_patcher.stop()
        self.firehose_log_patcher.stop()

+    def _assert_source_file_moved(self, filename: str):
+        """Check used in the duplicate scenario to validate that the original uploaded file is moved to the archive folder"""
+        with self.assertRaises(botocore.exceptions.ClientError) as exc:
+            s3_client.get_object(Bucket=self.mock_source_bucket, Key=filename)
+
+        self.assertEqual(str(exc.exception), "An error occurred (NoSuchKey) when calling the GetObject "
+                                             "operation: The specified key does not exist.")
+        archived_object = s3_client.get_object(Bucket=self.mock_source_bucket, Key=f"archive/{filename}")
+        self.assertIsNotNone(archived_object)
+
+    def _assert_ack_file_created(self, ack_file_key: str):
+        """Check used in the duplicate scenario to validate that the failure ack file was created"""
+        ack_file = s3_client.get_object(Bucket=self.mock_ack_bucket, Key=f"ack/{ack_file_key}")
+        self.assertIsNotNone(ack_file)
+
    def test_lambda_handler_raises_error_when_empty_batch_received(self):
        with self.assertRaises(InvalidBatchSizeError) as exc:
            lambda_handler({"Records": []}, {})
@@ -98,21 +131,27 @@ def test_lambda_handler_handles_duplicate_file_scenario(self):
        add_entry_to_mock_table(dynamodb_client, AUDIT_TABLE_NAME, self.default_batch_file_event, FileStatus.PROCESSED)
        duplicate_file_event = copy.deepcopy(self.default_batch_file_event)
        duplicate_file_event["message_id"] = "fc9008b7-3865-4dcf-88b8-fc4abafff5f8"
+        test_file_name = duplicate_file_event["filename"]

        # Add the audit record for the incoming event
        add_entry_to_mock_table(dynamodb_client, AUDIT_TABLE_NAME, duplicate_file_event, FileStatus.QUEUED)

+        # Create the source file in S3
+        s3_client.put_object(Bucket=self.mock_source_bucket, Key=test_file_name)
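+        # put_object with no Body creates a zero-byte object; only the key's
+        # existence matters for this scenario.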
+
        lambda_handler({"Records": [make_sqs_record(duplicate_file_event)]}, {})

        status = get_audit_entry_status_by_id(dynamodb_client, AUDIT_TABLE_NAME, duplicate_file_event["message_id"])
        self.assertEqual(status, "Not processed - duplicate")

        sqs_messages = sqs_client.receive_message(QueueUrl=self.mock_queue_url)
        self.assertEqual(sqs_messages.get("Messages", []), [])
+        self._assert_source_file_moved(test_file_name)
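+        # The expected ack key below is the source filename stem with an
+        # "_InfAck_" marker and the event's created_at_formatted_string appended.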
+        self._assert_ack_file_created("Menacwy_Vaccinations_v5_TEST_20250820T10210000_InfAck_20250826T14372600.csv")

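        # logger.info is invoked lazily with a "%s" template, so the assertion
        # checks the template and the filename argument separately.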
        self.mock_logger.info.assert_called_once_with(
            "A duplicate file has already been processed. Filename: %s",
-            "Menacwy_Vaccinations_v5_TEST_20250820T10210000.csv"
+            test_file_name
        )

    def test_lambda_handler_raises_error_when_event_already_processing_for_supplier_and_vacc_type(self):
@@ -164,7 +203,8 @@ def test_lambda_handler_processes_event_successfully(self):
        self.assertEqual(len(sqs_messages.get("Messages", [])), 1)
        self.assertDictEqual(json.loads(sqs_messages["Messages"][0]["Body"]), dict(self.default_batch_file_event))

-        expected_log_message = "File forwarded for processing by ECS"
+        expected_log_message = (f"File forwarded for processing by ECS. Filename: "
+                                f"{self.default_batch_file_event['filename']}")
        self.mock_logger.info.assert_called_once_with(expected_log_message)
        self.mock_firehose_send_log.assert_called_once_with(
            {**self.default_batch_file_event, "message": expected_log_message}
@@ -194,7 +234,7 @@ def test_lambda_handler_processes_event_successfully_when_event_for_same_supplie
        self.assertEqual(len(sqs_messages.get("Messages", [])), 1)
        self.assertDictEqual(json.loads(sqs_messages["Messages"][0]["Body"]), dict(test_event))

-        expected_log_message = "File forwarded for processing by ECS"
+        expected_log_message = f"File forwarded for processing by ECS. Filename: {test_event['filename']}"
        self.mock_logger.info.assert_called_once_with(expected_log_message)
        self.mock_firehose_send_log.assert_called_once_with(
            {**test_event, "message": expected_log_message}