import unittest
import json
from decimal import Decimal
+ from json import JSONDecodeError
from unittest.mock import patch
from datetime import datetime, timedelta, timezone
- from moto import mock_s3, mock_kinesis, mock_firehose
+ from moto import mock_s3, mock_kinesis, mock_firehose, mock_dynamodb
from boto3 import client as boto3_client

from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
    GenericSetUp,
    GenericTearDown,
+     add_entry_to_table,
+     assert_audit_table_entry
)
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
    MockFileDetails,
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import create_patch

with patch("os.environ", MOCK_ENVIRONMENT_DICT):
-     from constants import Diagnostics
+     from constants import Diagnostics, FileStatus, FileNotProcessedReason, AUDIT_TABLE_NAME, AuditTableKeys
    from batch_processor import main

s3_client = boto3_client("s3", region_name=REGION_NAME)
kinesis_client = boto3_client("kinesis", region_name=REGION_NAME)
firehose_client = boto3_client("firehose", region_name=REGION_NAME)
+ dynamo_db_client = boto3_client("dynamodb", region_name=REGION_NAME)
yesterday = datetime.now(timezone.utc) - timedelta(days=1)
mock_rsv_emis_file = MockFileDetails.rsv_emis


@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
+ @mock_dynamodb
@mock_s3
@mock_kinesis
@mock_firehose
class TestRecordProcessor(unittest.TestCase):
    """Tests for main function for RecordProcessor"""

    def setUp(self) -> None:
-         GenericSetUp(s3_client, firehose_client, kinesis_client)
+         GenericSetUp(s3_client, firehose_client, kinesis_client, dynamo_db_client)

        redis_patcher = patch("mappings.redis_client")
+         batch_processor_logger_patcher = patch("batch_processor.logger")
        self.addCleanup(redis_patcher.stop)
+         self.mock_batch_processor_logger = batch_processor_logger_patcher.start()
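+         # The patched logger lets the invalid-JSON test below assert on logger.error calls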
        mock_redis_client = redis_patcher.start()
        mock_redis_client.hget.return_value = json.dumps([{
            "code": "55735004",
@@ -151,9 +158,11 @@ def test_e2e_full_permissions(self):
        Tests that file containing CREATE, UPDATE and DELETE is successfully processed when the supplier has
        full permissions.
        """
+         test_file = mock_rsv_emis_file
        self.upload_source_files(ValidMockFileContent.with_new_and_update_and_delete)
+         add_entry_to_table(test_file, FileStatus.PROCESSING)

-         main(mock_rsv_emis_file.event_full_permissions)
+         main(test_file.event_full_permissions)

        # Assertion case tuples are structured as
        # (test_name, index, expected_kinesis_data_ignoring_fhir_json, expect_success)
@@ -179,15 +188,18 @@ def test_e2e_full_permissions(self):
        ]
        self.make_inf_ack_assertions(file_details=mock_rsv_emis_file, passed_validation=True)
        self.make_kinesis_assertions(assertion_cases)
+         assert_audit_table_entry(test_file, FileStatus.PREPROCESSED)

    def test_e2e_partial_permissions(self):
        """
        Tests that file containing CREATE, UPDATE and DELETE is successfully processed when the supplier only has CREATE
        permissions.
        """
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        self.upload_source_files(ValidMockFileContent.with_new_and_update_and_delete)

-         main(mock_rsv_emis_file.event_create_permissions_only)
+         main(test_file.event_create_permissions_only)

        # Assertion case tuples are structured as
        # (test_name, index, expected_kinesis_data_ignoring_fhir_json, expect_success)
@@ -229,15 +241,18 @@ def test_e2e_partial_permissions(self):
        ]
        self.make_inf_ack_assertions(file_details=mock_rsv_emis_file, passed_validation=True)
        self.make_kinesis_assertions(assertion_cases)
+         assert_audit_table_entry(test_file, FileStatus.PREPROCESSED)

    def test_e2e_no_required_permissions(self):
        """
        Tests that file containing UPDATE and DELETE is successfully processed when the supplier has CREATE permissions
        only.
        """
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        self.upload_source_files(ValidMockFileContent.with_update_and_delete)

-         main(mock_rsv_emis_file.event_create_permissions_only)
+         main(test_file.event_create_permissions_only)

        kinesis_records = kinesis_client.get_records(ShardIterator=self.get_shard_iterator(), Limit=10)["Records"]
        self.assertEqual(len(kinesis_records), 2)
@@ -247,27 +262,39 @@ def test_e2e_no_required_permissions(self):
            self.assertIn("diagnostics", data_dict)
            self.assertNotIn("fhir_json", data_dict)
        self.make_inf_ack_assertions(file_details=mock_rsv_emis_file, passed_validation=True)
+         assert_audit_table_entry(test_file, FileStatus.PREPROCESSED)

    def test_e2e_no_permissions(self):
        """
        Tests that file containing UPDATE and DELETE is successfully processed when the supplier has no permissions.
        """
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        self.upload_source_files(ValidMockFileContent.with_update_and_delete)

-         main(mock_rsv_emis_file.event_no_permissions)
+         main(test_file.event_no_permissions)

        kinesis_records = kinesis_client.get_records(ShardIterator=self.get_shard_iterator(), Limit=10)["Records"]
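+         # Fetch the audit table record for this file so its final status can be asserted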
+         table_entry = dynamo_db_client.get_item(
+             TableName=AUDIT_TABLE_NAME, Key={AuditTableKeys.MESSAGE_ID: {"S": test_file.message_id}}
+         ).get("Item")
        self.assertEqual(len(kinesis_records), 0)
        self.make_inf_ack_assertions(file_details=mock_rsv_emis_file, passed_validation=False)
+         self.assertDictEqual(table_entry, {
+             **test_file.audit_table_entry,
+             "status": {"S": f"{FileStatus.NOT_PROCESSED} - {FileNotProcessedReason.UNAUTHORISED}"},
+             "error_details": {"S": "EMIS does not have permissions to perform any of the requested actions."}
+         })

    def test_e2e_invalid_action_flags(self):
        """Tests that file is successfully processed when the ACTION_FLAG field is empty or invalid."""
-
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        self.upload_source_files(
            ValidMockFileContent.with_update_and_delete.replace("update", "").replace("delete", "INVALID")
        )

-         main(mock_rsv_emis_file.event_full_permissions)
+         main(test_file.event_full_permissions)

        expected_kinesis_data = {
            "diagnostics": {
@@ -291,14 +318,16 @@ def test_e2e_invalid_action_flags(self):
    def test_e2e_differing_amounts_of_data(self):
        """Tests that file containing rows with differing amounts of data present is processed as expected"""
        # Create file content with different amounts of data present in each row
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        headers = "|".join(MockFieldDictionaries.all_fields.keys())
        all_fields_values = "|".join(f'"{v}"' for v in MockFieldDictionaries.all_fields.values())
        mandatory_fields_only_values = "|".join(f'"{v}"' for v in MockFieldDictionaries.mandatory_fields_only.values())
        critical_fields_only_values = "|".join(f'"{v}"' for v in MockFieldDictionaries.critical_fields_only.values())
        file_content = f"{headers}\n{all_fields_values}\n{mandatory_fields_only_values}\n{critical_fields_only_values}"
        self.upload_source_files(file_content)

-         main(mock_rsv_emis_file.event_full_permissions)
+         main(test_file.event_full_permissions)

        all_fields_row_expected_kinesis_data = {
            "operation_requested": "UPDATE",
@@ -332,6 +361,8 @@ def test_e2e_kinesis_failed(self):
        Tests that, for a file with valid content and supplier with full permissions, when the kinesis send fails, the
        ack file is created and documents an error.
        """
+         test_file = mock_rsv_emis_file
+         add_entry_to_table(test_file, FileStatus.PROCESSING)
        self.upload_source_files(ValidMockFileContent.with_new_and_update)
        # Delete the kinesis stream, to cause kinesis send to fail
        kinesis_client.delete_stream(StreamName=Kinesis.STREAM_NAME, EnforceConsumerDeletion=True)
@@ -343,11 +374,14 @@ def test_e2e_kinesis_failed(self):
        ):  # noqa: E999
            mock_time.time.side_effect = [1672531200, 1672531200.123456]
            mock_datetime.now.return_value = datetime(2024, 1, 1, 12, 0, 0)
-             main(mock_rsv_emis_file.event_full_permissions)
+             main(test_file.event_full_permissions)

        # Since the failure occurred at row level, not file level, the ack file should still be created
        # and firehose logs should indicate a successful file level validation
-         self.make_inf_ack_assertions(file_details=mock_rsv_emis_file, passed_validation=True)
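+         # Read back the audit table entry to verify the FAILED status and error details recorded for this file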
+         table_entry = dynamo_db_client.get_item(
+             TableName=AUDIT_TABLE_NAME, Key={AuditTableKeys.MESSAGE_ID: {"S": test_file.message_id}}
+         ).get("Item")
+         self.make_inf_ack_assertions(file_details=test_file, passed_validation=True)
        expected_log_data = {
            "function_name": "record_processor_file_level_validation",
            "date_time": "2024-01-01 12:00:00",
@@ -360,6 +394,25 @@ def test_e2e_kinesis_failed(self):
            "message": "Successfully sent for record processing",
        }
        mock_send_log_to_firehose.assert_called_with(expected_log_data)
+         self.assertDictEqual(table_entry, {
+             **test_file.audit_table_entry,
+             "status": {"S": FileStatus.FAILED},
+             "error_details": {"S": "An error occurred (ResourceNotFoundException) when calling the PutRecord operation"
+                                    ": Stream imms-batch-internal-dev-processingdata-stream under account 123456789012"
+                                    " not found."}
+         })
+
+     def test_e2e_error_is_logged_if_invalid_json_provided(self):
+         """This scenario should not happen. If it does, it means our batch processing system config is broken and we
+         have received malformed content from SQS -> EventBridge. In this case we log the error so we will be alerted.
+         However, we cannot do anything with the AuditDB record as we cannot retrieve information from the event."""
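+         # Truncated JSON (missing a closing brace) so that decoding of the incoming event fails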
+         malformed_event = '{"test": {}'
+         main(malformed_event)
+
+         logged_message = self.mock_batch_processor_logger.error.call_args[0][0]
+         exception = self.mock_batch_processor_logger.error.call_args[0][1]
+         self.assertEqual(logged_message, "Error decoding incoming message: %s")
+         self.assertIsInstance(exception, JSONDecodeError)


if __name__ == "__main__":