@@ -427,37 +427,40 @@ def raise_connection_error(*args, **kwargs):
@patch("elasticache.get_redis_client")
def test_lambda_handler_extended_attributes_invalid_timestamp(self, mock_get_redis_client):
    """
    Invalid timestamps (too short or non-parseable) should fail validation
    and move to extended-attributes-archive/.
    """
    # Wire up a fake Redis so vaccine-type/supplier lookups succeed and
    # only the timestamp portion of the file key can fail validation.
    fake_redis = fakeredis.FakeStrictRedis()
    fake_redis.hget = Mock(side_effect=create_mock_hget({"X8E5B": "RAVS"}, {}))
    fake_redis.hkeys = Mock(return_value=["COVID", *all_vaccine_types_in_this_test_file])
    mock_get_redis_client.return_value = fake_redis

    # (file key with a bad timestamp, uuid4 value stubbed for that run)
    cases = (
        ("Vaccination_Extended_Attributes_v1_5_X8E5B_20000101T0000.csv", "invalid_timestamp_id"),
        ("Vaccination_Extended_Attributes_v1_5_X8E5B_20XX0101T00000001.csv", "invalid_timestamp_id2"),
    )

    for bad_key, stubbed_message_id in cases:
        with self.subTest(f"Invalid timestamp test for: {bad_key}"):
            # Stage the invalid file in the source bucket.
            s3_client.put_object(
                Bucket=BucketNames.SOURCE,
                Key=bad_key,
                Body=MOCK_EXTENDED_ATTRIBUTES_FILE_CONTENT,
            )

            with patch("file_name_processor.uuid4", return_value=stubbed_message_id):
                lambda_handler(self.make_event([self.make_record(bad_key)]), None)

            # The most recent audit entry must record the failure.
            newest_audit_entry = self.get_audit_table_items()[-1]
            self.assertEqual(newest_audit_entry[AuditTableKeys.STATUS]["S"], "Failed")

            # The file must have been moved under extended-attributes-archive/.
            archived = s3_client.get_object(
                Bucket=BucketNames.SOURCE,
                Key=f"extended-attributes-archive/{bad_key}",
            )
            self.assertIsNotNone(archived)
461464
462465 @patch ("elasticache.get_redis_client" )
463466 def test_lambda_handler_extended_attributes_extension_checks (self , mock_get_redis_client ):
@@ -569,16 +572,11 @@ def test_lambda_handler_extended_attributes_extension_checks(self, mock_get_redi
569572 patch ("file_name_processor.uuid4" , return_value = test_cases [0 ].message_id ),
570573 patch (
571574 "file_name_processor.copy_file_to_external_bucket" ,
572- side_effect = lambda src_bucket , key , dst_bucket , dst_key , exp_owner , exp_src_owner : (
573- # effectively do nothing
574- None ,
575- ),
575+ side_effect = lambda src_bucket , key , dst_bucket , dst_key , exp_owner , exp_src_owner : (None ,),
576576 ),
577577 ):
578578 lambda_handler (self .make_event ([self .make_record (invalid_file_key )]), None )
579579
580- # Assert audit table entry captured with Failed and queue_name set to the identifier.
581- # Assert that the ClientError message is an InvalidFileKeyError.
582580 table_items = self .get_audit_table_items ()
583581 # Removed brittle assertion on total audit count; subsequent checks below verify the expected audit content
584582 item = table_items [- 1 ]
@@ -594,7 +592,6 @@ def test_lambda_handler_extended_attributes_extension_checks(self, mock_get_redi
594592 self .assertEqual (item [AuditTableKeys .EXPIRES_AT ]["N" ], str (test_cases [0 ].expires_at ))
595593 # File should be moved to source under archive/
596594 dest_key = f"extended-attributes-archive/{ invalid_file_key } "
597- print (f" destination file is at { s3_client .list_objects (Bucket = BucketNames .SOURCE )} " )
598595 retrieved = s3_client .get_object (Bucket = BucketNames .SOURCE , Key = dest_key )
599596 self .assertIsNotNone (retrieved )
600597
0 commit comments