Skip to content

Commit dda608a

Browse files
committed
Unit tests
1 parent b3c0f4a commit dda608a

File tree

5 files changed

+163
-26
lines changed

5 files changed

+163
-26
lines changed

filenameprocessor/Makefile

Lines changed: 8 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,20 @@
11
build:
22
docker build -t imms-lambda-build -f Dockerfile .
33

4-
package:build
4+
package: build
55
mkdir -p build
66
docker run --rm -v $(shell pwd)/build:/build imms-lambda-build
77

88
test:
99
python -m unittest
1010

11-
coverage run:
12-
coverage run -m unittest discover
11+
coverage-run:
12+
coverage run -m unittest discover
1313

14-
coverage report:
14+
coverage-report:
1515
coverage report -m
1616

17-
coverage html:
18-
coverage html
19-
20-
.PHONY: build package test
17+
coverage-html:
18+
coverage html
19+
20+
.PHONY: build package test coverage-run coverage-report coverage-html

filenameprocessor/src/file_name_processor.py

Lines changed: 15 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
(ODS code has multiple lengths)
77
"""
88

9+
import argparse
910
from uuid import uuid4
1011
from utils_for_filenameprocessor import get_created_at_formatted_string, move_file, invoke_filename_lambda
1112
from file_key_validation import validate_file_key
@@ -67,10 +68,9 @@ def handle_record(record) -> dict:
6768
# Get message_id if the file is not new, else assign one
6869
message_id = record.get("message_id", str(uuid4()))
6970

70-
vaccine_type, supplier = validate_file_key(file_key)
71-
7271
created_at_formatted_string = get_created_at_formatted_string(bucket_name, file_key)
7372

73+
vaccine_type, supplier = validate_file_key(file_key)
7474
permissions = validate_vaccine_type_permissions(vaccine_type=vaccine_type, supplier=supplier)
7575
if not is_existing_file:
7676
ensure_file_is_not_a_duplicate(file_key, created_at_formatted_string)
@@ -136,6 +136,8 @@ def handle_record(record) -> dict:
136136
"file_key": file_key,
137137
"message_id": message_id,
138138
"error": str(error),
139+
"vaccine_type": vaccine_type,
140+
"supplier": supplier
139141
}
140142

141143
elif "config" in bucket_name:
@@ -168,21 +170,25 @@ def lambda_handler(event: dict, context) -> None: # pylint: disable=unused-argu
168170
logger.info("Filename processor lambda task completed")
169171

170172

171-
if __name__ == "__main__":
173+
def run_local():
174+
parser = argparse.ArgumentParser("file_name_processor")
175+
parser.add_argument("--bucket", required=True, help="Bucket name.", type=str)
176+
parser.add_argument("--key", required=True, help="Object key.", type=str)
177+
args = parser.parse_args()
172178

173179
event = {
174180
"Records": [
175181
{
176182
"s3": {
177-
"bucket": {
178-
"name": "immunisation-batch-internal-dev-data-sources"
179-
},
180-
"object": {
181-
"key": "FLU_Vaccinations_v5_YGM41_20000101T00000001.csv"
182-
}
183+
"bucket": {"name": args.bucket},
184+
"object": {"key": args.key}
183185
}
184186
}
185187
]
186188
}
187189
print(event)
188190
print(lambda_handler(event=event, context={}))
191+
192+
193+
if __name__ == "__main__":
194+
run_local()

filenameprocessor/src/logging_decorator.py

Lines changed: 1 addition & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -6,8 +6,6 @@
66
from datetime import datetime
77
from functools import wraps
88
from clients import firehose_client, logger
9-
from file_key_validation import validate_file_key
10-
119

1210
STREAM_NAME = os.getenv("SPLUNK_FIREHOSE_NAME", "immunisation-fhir-api-internal-dev-splunk-firehose")
1311

@@ -53,11 +51,7 @@ def wrapper(*args, **kwargs):
5351
return result
5452

5553
except Exception as e:
56-
file_key = args[0]["s3"]["object"]["key"]
57-
vaccine_type, supplier = validate_file_key(file_key)
58-
59-
additional_log_data = {"statusCode": 500, "error": str(e), "vaccine_type": vaccine_type,
60-
"supplier": supplier}
54+
additional_log_data = {"statusCode": 500, "error": str(e)}
6155
generate_and_send_logs(start_time, base_log_data, additional_log_data, is_error_log=True)
6256
raise
6357

filenameprocessor/tests/test_lambda_handler.py

Lines changed: 108 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,5 @@
11
"""Tests for lambda_handler"""
2-
2+
import sys
33
from unittest.mock import patch
44
from unittest import TestCase
55
from json import loads as json_loads
@@ -154,6 +154,14 @@ def get_audit_table_items():
154154
"""Return all items in the audit table"""
155155
return dynamodb_client.scan(TableName=AUDIT_TABLE_NAME).get("Items", [])
156156

157+
def test_lambda_handler_no_file_key_throws_exception(self):
158+
"""Tests if exception is thrown when file_key is not provided"""
159+
160+
broken_record = {"Records": [{"s3": {"bucket": {"name": "test"}}}]}
161+
with patch("file_name_processor.logger") as mock_logger:
162+
lambda_handler(broken_record, None)
163+
mock_logger.error.assert_called_once()
164+
157165
def test_lambda_handler_new_file_success_and_first_in_queue(self):
158166
"""
159167
Tests that for a new file, which passes validation and is the only file processing for the supplier_vaccineType
@@ -458,6 +466,31 @@ def setUp(self):
458466
def tearDown(self):
459467
GenericTearDown(s3_client, firehose_client, sqs_client, dynamodb_client)
460468

469+
def test_elasticcache_failure_handled(self):
470+
"Tests that an ElastiCache failure is handled when the upload to cache fails"
471+
event = {
472+
"s3": {
473+
"bucket": {"name": "my-config-bucket"}, # triggers 'config' branch
474+
"object": {"key": "testfile.csv"}
475+
}
476+
}
477+
478+
with patch("file_name_processor.upload_to_elasticache", side_effect=Exception("Upload failed")), \
479+
patch("file_name_processor.logger") as mock_logger:
480+
481+
result = handle_record(event)
482+
483+
self.assertEqual(result["statusCode"], 500)
484+
self.assertEqual(result["message"], "Failed to upload file content to cache")
485+
self.assertEqual(result["file_key"], "testfile.csv")
486+
self.assertIn("error", result)
487+
self.assertEqual(result["vaccine_type"], "unknown")
488+
self.assertEqual(result["supplier"], "unknown")
489+
490+
mock_logger.error.assert_called_once()
491+
logged_msg = mock_logger.error.call_args[0][0]
492+
self.assertIn("Error uploading to cache", logged_msg)
493+
461494
def test_successful_processing_from_configs(self):
462495
"""Tests that the permissions config file content is uploaded to elasticache successfully"""
463496
fake_redis = fakeredis.FakeStrictRedis()
@@ -496,7 +529,7 @@ def test_successful_processing_from_configs(self):
496529
"file_key": ravs_rsv_file_details_1.file_key,
497530
"message_id": ravs_rsv_file_details_1.message_id,
498531
"vaccine_type": ravs_rsv_file_details_1.vaccine_type,
499-
"supplier": ravs_rsv_file_details_1.supplier,
532+
"supplier": ravs_rsv_file_details_1.supplier
500533
}
501534
self.assertEqual(result, expected_result)
502535

@@ -524,5 +557,78 @@ def test_successful_processing_from_configs(self):
524557
"file_key": ravs_rsv_file_details_2.file_key,
525558
"message_id": ravs_rsv_file_details_2.message_id,
526559
"error": "Initial file validation failed: RAVS does not have permissions for RSV",
560+
"vaccine_type": ravs_rsv_file_details_2.vaccine_type,
561+
"supplier": ravs_rsv_file_details_2.supplier
527562
}
528563
self.assertEqual(result, expected_result)
564+
565+
566+
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
567+
@mock_s3
568+
@mock_dynamodb
569+
@mock_sqs
570+
@mock_firehose
571+
class TestUnexpectedBucket(TestCase):
572+
"""Tests for lambda_handler when an unexpected bucket name is used"""
573+
574+
def setUp(self):
575+
GenericSetUp(s3_client, firehose_client, sqs_client, dynamodb_client)
576+
577+
def tearDown(self):
578+
GenericTearDown(s3_client, firehose_client, sqs_client, dynamodb_client)
579+
580+
def test_unexpected_bucket_name(self):
581+
"""Tests that an unknown bucket name is handled in lambda_handler"""
582+
record = {
583+
"s3": {
584+
"bucket": {"name": "unknown-bucket"},
585+
"object": {"key": "somefile.csv"}
586+
}
587+
}
588+
589+
with patch("file_name_processor.logger") as mock_logger:
590+
result = handle_record(record)
591+
592+
self.assertEqual(result["statusCode"], 500)
593+
self.assertIn("unexpected bucket name", result["message"])
594+
self.assertEqual(result["file_key"], "somefile.csv")
595+
self.assertEqual(result["vaccine_type"], "unknown")
596+
self.assertEqual(result["supplier"], "unknown")
597+
598+
mock_logger.error.assert_called_once()
599+
args = mock_logger.error.call_args[0]
600+
self.assertIn("Unable to process file", args[0])
601+
self.assertIn("somefile.csv", args)
602+
self.assertIn("unknown-bucket", args)
603+
604+
605+
class TestMainEntryPoint(TestCase):
606+
607+
def test_run_local_constructs_event_and_calls_lambda_handler(self):
608+
test_args = [
609+
"file_name_processor.py",
610+
"--bucket", "test-bucket",
611+
"--key", "some/path/file.csv"
612+
]
613+
614+
expected_event = {
615+
"Records": [
616+
{
617+
"s3": {
618+
"bucket": {"name": "test-bucket"},
619+
"object": {"key": "some/path/file.csv"}
620+
}
621+
}
622+
]
623+
}
624+
625+
with (
626+
patch.object(sys, "argv", test_args),
627+
patch("file_name_processor.lambda_handler") as mock_lambda_handler,
628+
patch("file_name_processor.print") as mock_print
629+
):
630+
import file_name_processor
631+
file_name_processor.run_local()
632+
633+
mock_lambda_handler.assert_called_once_with(event=expected_event, context={})
634+
mock_print.assert_called()

filenameprocessor/tests/test_logging_decorator.py

Lines changed: 31 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66
from copy import deepcopy
77
from contextlib import ExitStack
88
from boto3 import client as boto3_client
9+
from botocore.exceptions import ClientError
910
from moto import mock_s3, mock_firehose, mock_sqs, mock_dynamodb
1011

1112
from tests.utils_for_tests.generic_setup_and_teardown import GenericSetUp, GenericTearDown
@@ -192,13 +193,43 @@ def test_logging_failed_validation(self):
192193
"file_key": FILE_DETAILS.file_key,
193194
"message_id": FILE_DETAILS.message_id,
194195
"error": "Initial file validation failed: EMIS does not have permissions for FLU",
196+
"vaccine_type": "FLU",
197+
"supplier": "EMIS"
195198
}
196199

197200
log_data = json.loads(mock_logger.info.call_args[0][0])
198201
self.assertEqual(log_data, expected_log_data)
199202

200203
mock_send_log_to_firehose.assert_called_once_with(log_data)
201204

205+
def test_logging_throws_exception(self):
206+
"""Tests that exception is caught when failing to send message to Firehose"""
207+
permissions_config_content = generate_permissions_config_content({"EMIS": ["COVID19_FULL"]})
208+
209+
firehose_exception = ClientError(
210+
error_response={"Error": {"Code": "ServiceUnavailable", "Message": "Service down"}},
211+
operation_name="PutRecord"
212+
)
213+
214+
with (
215+
patch("file_name_processor.uuid4", return_value=FILE_DETAILS.message_id),
216+
patch("elasticache.redis_client.get", return_value=permissions_config_content),
217+
patch("logging_decorator.firehose_client.put_record", side_effect=firehose_exception),
218+
patch("logging_decorator.logger") as mock_logger,
219+
):
220+
lambda_handler(MOCK_VACCINATION_EVENT, context=None)
221+
222+
# Assert logger.exception was called once
223+
mock_logger.exception.assert_called_once()
224+
225+
# Extract the call arguments
226+
exception_message = mock_logger.exception.call_args[0][0]
227+
exception_obj = mock_logger.exception.call_args[0][1]
228+
229+
# Check that the message format is correct
230+
self.assertIn("Error sending log to Firehose", exception_message)
231+
self.assertEqual(exception_obj, firehose_exception)
232+
202233
def test_logging_successful_config_upload(self):
203234
"""
204235
Tests that the correct logs are sent to cloudwatch and splunk when the config cache is successfully updated

0 commit comments

Comments
 (0)