|
1 | 1 | """Tests for lambda_handler""" |
2 | | - |
| 2 | +import sys |
3 | 3 | from unittest.mock import patch |
4 | 4 | from unittest import TestCase |
5 | 5 | from json import loads as json_loads |
@@ -154,6 +154,14 @@ def get_audit_table_items(): |
154 | 154 | """Return all items in the audit table""" |
155 | 155 | return dynamodb_client.scan(TableName=AUDIT_TABLE_NAME).get("Items", []) |
156 | 156 |
|
| 157 | + def test_lambda_handler_no_file_key_throws_exception(self): |
| 158 | + """Tests if exception is thrown when file_key is not provided""" |
| 159 | + |
| 160 | + broken_record = {"Records": [{"s3": {"bucket": {"name": "test"}}}]} |
| 161 | + with patch("file_name_processor.logger") as mock_logger: |
| 162 | + lambda_handler(broken_record, None) |
| 163 | + mock_logger.error.assert_called_once() |
| 164 | + |
157 | 165 | def test_lambda_handler_new_file_success_and_first_in_queue(self): |
158 | 166 | """ |
159 | 167 | Tests that for a new file, which passes validation and is the only file processing for the supplier_vaccineType |
@@ -458,6 +466,29 @@ def setUp(self): |
458 | 466 | def tearDown(self): |
459 | 467 | GenericTearDown(s3_client, firehose_client, sqs_client, dynamodb_client) |
460 | 468 |
|
| 469 | + def test_elasticcache_failure_handled(self): |
| 470 | + "Tests if elastic cache failure is handled when service fails to send message" |
| 471 | + event = { |
| 472 | + "s3": { |
| 473 | + "bucket": {"name": "my-config-bucket"}, # triggers 'config' branch |
| 474 | + "object": {"key": "testfile.csv"} |
| 475 | + } |
| 476 | + } |
| 477 | + |
| 478 | + with patch("file_name_processor.upload_to_elasticache", side_effect=Exception("Upload failed")), \ |
| 479 | + patch("file_name_processor.logger") as mock_logger: |
| 480 | + |
| 481 | + result = handle_record(event) |
| 482 | + |
| 483 | + self.assertEqual(result["statusCode"], 500) |
| 484 | + self.assertEqual(result["message"], "Failed to upload file content to cache") |
| 485 | + self.assertEqual(result["file_key"], "testfile.csv") |
| 486 | + self.assertIn("error", result) |
| 487 | + |
| 488 | + mock_logger.error.assert_called_once() |
| 489 | + logged_msg = mock_logger.error.call_args[0][0] |
| 490 | + self.assertIn("Error uploading to cache", logged_msg) |
| 491 | + |
461 | 492 | def test_successful_processing_from_configs(self): |
462 | 493 | """Tests that the permissions config file content is uploaded to elasticache successfully""" |
463 | 494 | fake_redis = fakeredis.FakeStrictRedis() |
@@ -496,7 +527,7 @@ def test_successful_processing_from_configs(self): |
496 | 527 | "file_key": ravs_rsv_file_details_1.file_key, |
497 | 528 | "message_id": ravs_rsv_file_details_1.message_id, |
498 | 529 | "vaccine_type": ravs_rsv_file_details_1.vaccine_type, |
499 | | - "supplier": ravs_rsv_file_details_1.supplier, |
| 530 | + "supplier": ravs_rsv_file_details_1.supplier |
500 | 531 | } |
501 | 532 | self.assertEqual(result, expected_result) |
502 | 533 |
|
@@ -524,5 +555,104 @@ def test_successful_processing_from_configs(self): |
524 | 555 | "file_key": ravs_rsv_file_details_2.file_key, |
525 | 556 | "message_id": ravs_rsv_file_details_2.message_id, |
526 | 557 | "error": "Initial file validation failed: RAVS does not have permissions for RSV", |
| 558 | + "vaccine_type": ravs_rsv_file_details_2.vaccine_type, |
| 559 | + "supplier": ravs_rsv_file_details_2.supplier |
527 | 560 | } |
528 | 561 | self.assertEqual(result, expected_result) |
| 562 | + |
| 563 | + |
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
@mock_s3
@mock_dynamodb
@mock_sqs
@mock_firehose
class TestUnexpectedBucket(TestCase):
    """Tests for lambda_handler when an unexpected bucket name is used"""

    def setUp(self):
        GenericSetUp(s3_client, firehose_client, sqs_client, dynamodb_client)

    def tearDown(self):
        GenericTearDown(s3_client, firehose_client, sqs_client, dynamodb_client)

    def _process_record_for_unknown_bucket(self, file_key):
        """Run handle_record for the given file key in an unrecognised bucket; return (result, mock_logger)."""
        record = {
            "s3": {
                "bucket": {"name": "unknown-bucket"},
                "object": {"key": file_key},
            }
        }
        with patch("file_name_processor.logger") as mock_logger:
            return handle_record(record), mock_logger

    def _check_error_response(self, result, mock_logger, file_key, vaccine_type, supplier):
        """Shared assertions on the error response and the single logged error for an unexpected bucket."""
        self.assertEqual(result["statusCode"], 500)
        self.assertIn("unexpected bucket name", result["message"])
        self.assertEqual(result["file_key"], file_key)
        self.assertEqual(result["vaccine_type"], vaccine_type)
        self.assertEqual(result["supplier"], supplier)

        mock_logger.error.assert_called_once()
        logged_args = mock_logger.error.call_args[0]
        self.assertIn("Unable to process file", logged_args[0])
        self.assertIn(file_key, logged_args)
        self.assertIn("unknown-bucket", logged_args)

    def test_unexpected_bucket_name(self):
        """Tests that an unknown bucket name is handled in lambda_handler"""
        file_details = MockFileDetails.ravs_rsv_1
        result, mock_logger = self._process_record_for_unknown_bucket(file_details.file_key)
        # Vaccine type and supplier are still resolved from the (valid) file key.
        self._check_error_response(
            result,
            mock_logger,
            file_details.file_key,
            file_details.vaccine_type,
            file_details.supplier,
        )

    def test_unexpected_bucket_name_and_filename_validation_fails(self):
        """Tests that a filename validation failure is handled when the bucket name is also incorrect"""
        invalid_file_key = "InvalidVaccineType_Vaccinations_v5_YGM41_20240708T12130100.csv"
        result, mock_logger = self._process_record_for_unknown_bucket(invalid_file_key)
        # With an unparseable file key, vaccine type and supplier fall back to "unknown".
        self._check_error_response(result, mock_logger, invalid_file_key, "unknown", "unknown")
| 627 | + |
| 628 | + |
class TestMainEntryPoint(TestCase):
    """Tests for the run_local command-line entry point of file_name_processor"""

    def test_run_local_constructs_event_and_calls_lambda_handler(self):
        """Tests that run_local builds an S3 event from CLI args and forwards it to lambda_handler"""
        cli_args = [
            "file_name_processor.py",
            "--bucket", "test-bucket",
            "--key", "some/path/file.csv",
        ]
        expected_event = {
            "Records": [
                {
                    "s3": {
                        "bucket": {"name": "test-bucket"},
                        "object": {"key": "some/path/file.csv"},
                    }
                }
            ]
        }

        with (
            patch.object(sys, "argv", cli_args),
            patch("file_name_processor.lambda_handler") as mock_lambda_handler,
            patch("file_name_processor.print") as mock_print,
        ):
            import file_name_processor
            file_name_processor.run_local()

            mock_lambda_handler.assert_called_once_with(event=expected_event, context={})
            mock_print.assert_called()
0 commit comments