
Commit e9bac32

mocking with filenameprocessor tests
1 parent ed7a295 commit e9bac32

12 files changed: +212 -183 lines changed

filenameprocessor/src/file_key_validation.py

Lines changed: 1 addition & 1 deletion
@@ -3,7 +3,7 @@
from re import match
from datetime import datetime
from constants import Constants
-from vaccine_types import get_valid_vaccine_types_from_cache
+from supplier_permissions import get_valid_vaccine_types_from_cache
from utils_for_filenameprocessor import identify_supplier
from errors import InvalidFileKeyError

filenameprocessor/src/supplier_permissions.py

Lines changed: 4 additions & 2 deletions
@@ -4,7 +4,6 @@
import json
from constants import PERMISSIONS_CONFIG_FILE_KEY, VACCINE_TYPE_TO_DISEASES_HASH_KEY
from errors import VaccineTypePermissionsError
-from elasticache import get_permissions_config_json_from_cache


def get_supplier_permissions(supplier: str) -> list[str]:
@@ -22,6 +21,9 @@ def get_permissions_config_json_from_cache() -> dict:
    """Gets and returns the permissions config file content from ElastiCache (Redis)."""
    return json.loads(redis_client.get(PERMISSIONS_CONFIG_FILE_KEY))

+def get_valid_vaccine_types_from_cache() -> list[str]:
+    return redis_client.hkeys(VACCINE_TYPE_TO_DISEASES_HASH_KEY)
+

def validate_vaccine_type_permissions(vaccine_type: str, supplier: str) -> list:
    """
@@ -31,7 +33,7 @@ def validate_vaccine_type_permissions(vaccine_type: str, supplier: str) -> list:
    supplier_permissions = get_supplier_permissions(supplier)

    # Validate that supplier has at least one permissions for the vaccine type
-    if not any(permission.split(".")[0] == vaccine_type for permission in supplier_permissions):
+    if not any(vaccine_type in permission for permission in supplier_permissions):
        error_message = f"Initial file validation failed: {supplier} does not have permissions for {vaccine_type}"
        logger.error(error_message)
        raise VaccineTypePermissionsError(error_message)
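
With `get_valid_vaccine_types_from_cache` now living alongside the other permission helpers, tests can patch `supplier_permissions.redis_client` directly (the same target used in `test_file_key_validation.py` below). A minimal sketch of how the relocated helper and the relaxed substring check could be exercised, assuming the module layout shown in this diff; the test class, supplier name and the `"RSV.CRUD"` permission string are illustrative only:

```python
from unittest import TestCase
from unittest.mock import patch

from errors import VaccineTypePermissionsError
from supplier_permissions import get_valid_vaccine_types_from_cache, validate_vaccine_type_permissions


class TestSupplierPermissionsSketch(TestCase):
    """Illustrative tests for the helpers touched by this commit (names are hypothetical)."""

    @patch("supplier_permissions.redis_client.hkeys", return_value=["FLU", "RSV"])
    def test_valid_vaccine_types_are_the_hash_keys(self, _mock_hkeys):
        # The helper simply returns the keys of the vaccine-type-to-diseases hash
        self.assertEqual(get_valid_vaccine_types_from_cache(), ["FLU", "RSV"])

    @patch("supplier_permissions.get_supplier_permissions", return_value=["RSV.CRUD"])
    def test_vaccine_type_permission_check(self, _mock_get_permissions):
        # The new check is a substring match, so "RSV" is found inside "RSV.CRUD" ...
        validate_vaccine_type_permissions(vaccine_type="RSV", supplier="RAVS")
        # ... while "FLU" is not, which raises VaccineTypePermissionsError
        with self.assertRaises(VaccineTypePermissionsError):
            validate_vaccine_type_permissions(vaccine_type="FLU", supplier="RAVS")
```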

filenameprocessor/src/utils_for_filenameprocessor.py

Lines changed: 7 additions & 3 deletions
@@ -19,10 +19,14 @@ def identify_supplier(ods_code: str) -> str:
    return Constants.ODS_TO_SUPPLIER_MAPPINGS.get(ods_code, "")


-def move_file(bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
-    """Moves a file from one location to another within a single S3 bucket by copying and then deleting the file."""
+def move_file(s3_client, bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
+    """
+    Moves a file from one location to another within a single S3 bucket by copying and then deleting the file.
+    """
    s3_client.copy_object(
-        Bucket=bucket_name, CopySource={"Bucket": bucket_name, "Key": source_file_key}, Key=destination_file_key
+        Bucket=bucket_name,
+        CopySource={"Bucket": bucket_name, "Key": source_file_key},
+        Key=destination_file_key,
    )
    s3_client.delete_object(Bucket=bucket_name, Key=source_file_key)
    logger.info("File moved from %s to %s", source_file_key, destination_file_key)

filenameprocessor/tests/test_audit_table.py

Lines changed: 25 additions & 5 deletions
@@ -2,8 +2,8 @@

from unittest import TestCase
from unittest.mock import patch
-from boto3 import client as boto3_client
-from moto import mock_dynamodb
+import boto3
+from moto import mock_dynamodb, mock_sqs

from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT
from tests.utils_for_tests.generic_setup_and_teardown import GenericSetUp, GenericTearDown
@@ -23,15 +23,34 @@

FILE_DETAILS = MockFileDetails.ravs_rsv_1

-
+@mock_sqs
@mock_dynamodb
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
class TestAuditTable(TestCase):
    """Tests for audit table functions"""

    def setUp(self):
        """Set up test values to be used for the tests"""
-        self.dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
+        self.dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME)
+        self.dynamodb_resource = boto3.resource("dynamodb", region_name=REGION_NAME)
+
+        # Patch both the client and resource inside the audit_table module
+        patcher_client = patch("audit_table.dynamodb_client", self.dynamodb_client)
+        patcher_resource = patch("audit_table.dynamodb_resource", self.dynamodb_resource)
+
+        patcher_client.start()
+        patcher_resource.start()
+
+        self.addCleanup(patcher_client.stop)
+        self.addCleanup(patcher_resource.stop)
+
+        # Patch both the client and resource used inside the audit_table module
+        patcher_client = patch("audit_table.dynamodb_client", self.dynamodb_client)
+
+        patcher_client.start()
+
+        self.addCleanup(patcher_client.stop)
+
        GenericSetUp(dynamodb_client=self.dynamodb_client)

    def tearDown(self):
@@ -68,6 +87,7 @@ def test_get_next_queued_file_details(self):
        add_entry_to_table(MockFileDetails.ravs_rsv_3, file_status=FileStatus.QUEUED)
        add_entry_to_table(MockFileDetails.ravs_rsv_4, file_status=FileStatus.QUEUED)
        self.assertEqual(get_next_queued_file_details(queue_to_check), deserialize_dynamodb_types(expected_table_entry))
+

    def test_ensure_file_is_not_a_duplicate(self):
        """
@@ -222,7 +242,7 @@ def test_upsert_audit_table(self):
        )

        # Final reconciliation: ensure that all of the correct items are in the audit table
-        table_items = self.get_table_items()
+        table_items = self.get_table_items(self)
        assert len(table_items) == 7
        assert_audit_table_entry(MockFileDetails.emis_flu, FileStatus.QUEUED)
        assert_audit_table_entry(MockFileDetails.emis_rsv, FileStatus.QUEUED)
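
The pattern above is driven by how moto works: a boto3 client created at import time is built before the `@mock_dynamodb` / `@mock_sqs` decorators activate, so it would point at real AWS endpoints. Creating the clients inside `setUp` and patching them into the module under test, with `addCleanup` undoing the patches even when a test fails, is the general shape. A stripped-down sketch, with module and attribute names taken from this diff and the region hard-coded for illustration:

```python
import boto3
from unittest import TestCase
from unittest.mock import patch
from moto import mock_dynamodb


@mock_dynamodb
class TestWithPatchedClients(TestCase):
    """Illustrative shape of the setUp used above."""

    def setUp(self):
        # Built here, after the class decorator has switched boto3 over to moto's in-memory DynamoDB
        self.dynamodb_client = boto3.client("dynamodb", region_name="eu-west-2")

        # Swap the module-level client that audit_table resolved at import time
        patcher = patch("audit_table.dynamodb_client", self.dynamodb_client)
        patcher.start()
        self.addCleanup(patcher.stop)  # undone even when setUp or the test body raises
```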

filenameprocessor/tests/test_file_key_validation.py

Lines changed: 1 addition & 1 deletion
@@ -38,7 +38,7 @@ def test_is_valid_datetime(self):
            with self.subTest():
                self.assertEqual(is_valid_datetime(date_time_string), expected_result)

-    @patch("elasticache.redis_client.hkeys", return_value=["FLU", "RSV"])
+    @patch("supplier_permissions.redis_client.hkeys", return_value=["FLU", "RSV"])
    def test_validate_file_key(self, _mock_hkeys):
        """Tests that file_key_validation returns True if all elements pass validation, and False otherwise"""
        # Test case tuples are structured as (file_key, expected_result)

filenameprocessor/tests/test_make_and_upload_ack_file.py

Lines changed: 20 additions & 19 deletions
@@ -3,8 +3,8 @@
from unittest import TestCase
from unittest.mock import patch
from copy import deepcopy
-from boto3 import client as boto3_client
-from moto import mock_s3
+import boto3
+from moto import mock_s3, mock_sqs

from tests.utils_for_tests.utils_for_filenameprocessor_tests import get_csv_file_dict_reader
from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT, BucketNames
@@ -15,9 +15,6 @@
from make_and_upload_ack_file import make_the_ack_data, upload_ack_file, make_and_upload_the_ack_file
from clients import REGION_NAME

-
-s3_client = boto3_client("s3", region_name=REGION_NAME)
-
FILE_DETAILS = MockFileDetails.emis_flu

# NOTE: The expected ack data is the same for all scenarios as the ack file is only created if an error occurs
@@ -38,14 +35,16 @@
}


+@mock_sqs
@mock_s3
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
class TestMakeAndUploadAckFile(TestCase):
    """Tests for make_and_upload_ack_file functions"""

    def setUp(self):
        """Set up the bucket for the ack files"""
-        s3_client.create_bucket(
+        self.s3_client = boto3.client("s3", region_name=REGION_NAME)
+        self.s3_client.create_bucket(
            Bucket=BucketNames.DESTINATION, CreateBucketConfiguration={"LocationConstraint": REGION_NAME}
        )

@@ -63,29 +62,31 @@ def test_make_ack_data(self):

    def test_upload_ack_file(self):
        """Test that upload_ack_file successfully uploads the ack file"""
-        upload_ack_file(
-            file_key=FILE_DETAILS.file_key,
-            ack_data=deepcopy(EXPECTED_ACK_DATA),
-            created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
-        )
+        with patch("make_and_upload_ack_file.s3_client", self.s3_client):
+            upload_ack_file(
+                file_key=FILE_DETAILS.file_key,
+                ack_data=deepcopy(EXPECTED_ACK_DATA),
+                created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
+            )

        expected_result = [deepcopy(EXPECTED_ACK_DATA)]
        # Note that the data downloaded from the CSV will contain the bool as a string
        expected_result[0]["MESSAGE_DELIVERY"] = "False"
-        csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.ack_file_key)
+        csv_dict_reader = get_csv_file_dict_reader(self.s3_client, BucketNames.DESTINATION, FILE_DETAILS.ack_file_key)
        self.assertEqual(list(csv_dict_reader), expected_result)

    def test_make_and_upload_ack_file(self):
        """Test that make_and_upload_ack_file uploads an ack file containing the correct values"""
-        make_and_upload_the_ack_file(
-            message_id=FILE_DETAILS.message_id,
-            file_key=FILE_DETAILS.file_key,
-            message_delivered=False,
-            created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
-        )
+        with patch("make_and_upload_ack_file.s3_client", self.s3_client):
+            make_and_upload_the_ack_file(
+                message_id=FILE_DETAILS.message_id,
+                file_key=FILE_DETAILS.file_key,
+                message_delivered=False,
+                created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
+            )

        expected_result = [deepcopy(EXPECTED_ACK_DATA)]
        # Note that the data downloaded from the CSV will contain the bool as a string
        expected_result[0]["MESSAGE_DELIVERY"] = "False"
-        csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.ack_file_key)
+        csv_dict_reader = get_csv_file_dict_reader(self.s3_client, BucketNames.DESTINATION, FILE_DETAILS.ack_file_key)
        self.assertEqual(list(csv_dict_reader), expected_result)

filenameprocessor/tests/test_send_sqs_message.py

Lines changed: 65 additions & 54 deletions
@@ -5,7 +5,7 @@
from json import loads as json_loads
from copy import deepcopy
from moto import mock_sqs
-from boto3 import client as boto3_client
+import boto3

from tests.utils_for_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT, Sqs
from tests.utils_for_tests.values_for_tests import MockFileDetails
@@ -16,10 +16,9 @@
from errors import UnhandledSqsError, InvalidSupplierError
from clients import REGION_NAME

-sqs_client = boto3_client("sqs", region_name=REGION_NAME)
-
FLU_EMIS_FILE_DETAILS = MockFileDetails.emis_flu
RSV_RAVS_FILE_DETAILS = MockFileDetails.ravs_rsv_1
+fake_queue_url = "https://sqs.eu-west-2.amazonaws.com/123456789012/non_existent_queue"

NON_EXISTENT_QUEUE_ERROR_MESSAGE = (
    "An unexpected error occurred whilst sending to SQS: An error occurred (AWS.SimpleQueueService.NonExistent"
@@ -32,54 +31,63 @@
class TestSendSQSMessage(TestCase):
    """Tests for send_sqs_message functions"""

+    def setUp(self):
+        self.sqs_client = boto3.client("sqs", region_name=REGION_NAME)
+        self.queue_url = self.sqs_client.create_queue(
+            QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES
+        )["QueueUrl"]
+
    def test_send_to_supplier_queue_success(self):
        """Test send_to_supplier_queue function for a successful message send"""
-        # Set up the sqs_queue
-        queue_url = sqs_client.create_queue(QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES)["QueueUrl"]
-
-        # Send three separate messages to the queue to test that they are all received and appropriately
-        # partitioned by supplier and vaccine_type
        flu_emis_1 = deepcopy(FLU_EMIS_FILE_DETAILS)
        flu_emis_2 = deepcopy(FLU_EMIS_FILE_DETAILS)
        flu_emis_2.sqs_message_body["message_id"] = "flu_emis_test_id_2"
        rsv_ravs_1 = deepcopy(RSV_RAVS_FILE_DETAILS)

-        for file_details in [flu_emis_1, rsv_ravs_1, flu_emis_2]:
-            self.assertIsNone(
-                send_to_supplier_queue(
-                    message_body=deepcopy(file_details.sqs_message_body),
-                    vaccine_type=file_details.vaccine_type,
-                    supplier=file_details.supplier,
+        with patch.dict("os.environ", {"QUEUE_URL": self.queue_url}):
+            with patch("send_sqs_message.sqs_client", self.sqs_client):
+                for file_details in [flu_emis_1, rsv_ravs_1, flu_emis_2]:
+                    self.assertIsNone(
+                        send_to_supplier_queue(
+                            message_body=deepcopy(file_details.sqs_message_body),
+                            vaccine_type=file_details.vaccine_type,
+                            supplier=file_details.supplier,
+                        )
                    )
-            )

-        # Check that the FIFO queue contains the expected messages, in correct order, and with correct MessageGroupId
-        received_messages = sqs_client.receive_message(
-            QueueUrl=queue_url, MaxNumberOfMessages=10, AttributeNames=["All"]
-        )["Messages"]
+        # Verify messages in queue
+        messages = self.sqs_client.receive_message(
+            QueueUrl=self.queue_url, MaxNumberOfMessages=10, AttributeNames=["All"]
+        )["Messages"]

-        self.assertEqual(len(received_messages), 3)
-        self.assertEqual(json_loads(received_messages[0]["Body"]), flu_emis_1.sqs_message_body)
-        self.assertEqual(received_messages[0]["Attributes"]["MessageGroupId"], flu_emis_1.queue_name)
-        self.assertEqual(json_loads(received_messages[1]["Body"]), rsv_ravs_1.sqs_message_body)
-        self.assertEqual(received_messages[1]["Attributes"]["MessageGroupId"], rsv_ravs_1.queue_name)
-        self.assertEqual(json_loads(received_messages[2]["Body"]), flu_emis_2.sqs_message_body)
-        self.assertEqual(received_messages[2]["Attributes"]["MessageGroupId"], flu_emis_2.queue_name)
+        self.assertEqual(len(messages), 3)
+        self.assertEqual(json_loads(messages[0]["Body"]), flu_emis_1.sqs_message_body)
+        self.assertEqual(messages[0]["Attributes"]["MessageGroupId"], flu_emis_1.queue_name)
+        self.assertEqual(json_loads(messages[1]["Body"]), rsv_ravs_1.sqs_message_body)
+        self.assertEqual(messages[1]["Attributes"]["MessageGroupId"], rsv_ravs_1.queue_name)
+        self.assertEqual(json_loads(messages[2]["Body"]), flu_emis_2.sqs_message_body)
+        self.assertEqual(messages[2]["Attributes"]["MessageGroupId"], flu_emis_2.queue_name)

    def test_send_to_supplier_queue_failure_due_to_queue_does_not_exist(self):
        """Test send_to_supplier_queue function for a failed message send due to queue not existing"""
-        with self.assertRaises(UnhandledSqsError) as context:
-            send_to_supplier_queue(
+
+        with patch.dict("os.environ", {"QUEUE_URL": fake_queue_url}):
+            with patch("send_sqs_message.sqs_client", self.sqs_client):
+                with self.assertRaises(UnhandledSqsError) as context:
+                    send_to_supplier_queue(
                        message_body=deepcopy(FLU_EMIS_FILE_DETAILS.sqs_message_body),
                        vaccine_type=FLU_EMIS_FILE_DETAILS.vaccine_type,
                        supplier=FLU_EMIS_FILE_DETAILS.supplier,
                    )
-        self.assertEqual(NON_EXISTENT_QUEUE_ERROR_MESSAGE, str(context.exception))
+        self.assertIn("An unexpected error occurred whilst sending to SQS", str(context.exception))
+        self.assertTrue(
+            "Queue does not exist" in str(context.exception) or "NonExistentQueue" in str(context.exception)
+        )

    def test_send_to_supplier_queue_failure_due_to_absent_supplier_or_vaccine_type(self):
        """Test send_to_supplier_queue function for a failed message send"""
        # Set up the sqs_queue
-        sqs_client.create_queue(QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES)
+        self.sqs_client.create_queue(QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES)
        expected_error_message = (
            "Message not sent to supplier queue as unable to identify supplier and/ or vaccine type"
        )
@@ -100,33 +108,36 @@ def test_send_to_supplier_queue_failure_due_to_absent_supplier_or_vaccine_type(s
    def test_make_and_send_sqs_message_success(self):
        """Test make_and_send_sqs_message function for a successful message send"""
        # Create a mock SQS queue
-        queue_url = sqs_client.create_queue(QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES)["QueueUrl"]
+        queue_url = self.sqs_client.create_queue(QueueName=Sqs.QUEUE_NAME, Attributes=Sqs.ATTRIBUTES)["QueueUrl"]

        # Call the send_to_supplier_queue function
-        self.assertIsNone(
-            make_and_send_sqs_message(
-                file_key=FLU_EMIS_FILE_DETAILS.file_key,
-                message_id=FLU_EMIS_FILE_DETAILS.message_id,
-                permission=deepcopy(FLU_EMIS_FILE_DETAILS.permissions_list),
-                vaccine_type=FLU_EMIS_FILE_DETAILS.vaccine_type,
-                supplier=FLU_EMIS_FILE_DETAILS.supplier,
-                created_at_formatted_string=FLU_EMIS_FILE_DETAILS.created_at_formatted_string,
-            )
+        with patch.dict("os.environ", {"QUEUE_URL": queue_url}):
+            with patch("send_sqs_message.sqs_client", self.sqs_client):
+                self.assertIsNone(
+                    make_and_send_sqs_message(
+                        file_key=FLU_EMIS_FILE_DETAILS.file_key,
+                        message_id=FLU_EMIS_FILE_DETAILS.message_id,
+                        permission=deepcopy(FLU_EMIS_FILE_DETAILS.permissions_list),
+                        vaccine_type=FLU_EMIS_FILE_DETAILS.vaccine_type,
+                        supplier=FLU_EMIS_FILE_DETAILS.supplier,
+                        created_at_formatted_string=FLU_EMIS_FILE_DETAILS.created_at_formatted_string,
+                    )
                )
-
-        # Assert that correct message has reached the queue
-        messages = sqs_client.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=1)
-        self.assertEqual(json_loads(messages["Messages"][0]["Body"]), deepcopy(FLU_EMIS_FILE_DETAILS.sqs_message_body))
+                # Assert that correct message has reached the queue
+                messages = self.sqs_client.receive_message(QueueUrl=queue_url, MaxNumberOfMessages=1)
+                self.assertEqual(json_loads(messages["Messages"][0]["Body"]), deepcopy(FLU_EMIS_FILE_DETAILS.sqs_message_body))

    def test_make_and_send_sqs_message_failure(self):
-        """Test make_and_send_sqs_message function for a failure due to queue not existing"""
-        with self.assertRaises(UnhandledSqsError) as context:
-            make_and_send_sqs_message(
-                file_key=FLU_EMIS_FILE_DETAILS.file_key,
-                message_id=FLU_EMIS_FILE_DETAILS.message_id,
-                permission=deepcopy(FLU_EMIS_FILE_DETAILS.permissions_list),
-                vaccine_type=FLU_EMIS_FILE_DETAILS.vaccine_type,
-                supplier=FLU_EMIS_FILE_DETAILS.supplier,
-                created_at_formatted_string=FLU_EMIS_FILE_DETAILS.created_at_formatted_string,
-            )
+
+        with patch.dict("os.environ", {"QUEUE_URL": fake_queue_url}):
+            with patch("send_sqs_message.sqs_client", boto3.client("sqs", region_name=REGION_NAME)):
+                with self.assertRaises(UnhandledSqsError) as context:
+                    make_and_send_sqs_message(
+                        file_key=FLU_EMIS_FILE_DETAILS.file_key,
+                        message_id=FLU_EMIS_FILE_DETAILS.message_id,
+                        permission=deepcopy(FLU_EMIS_FILE_DETAILS.permissions_list),
+                        vaccine_type=FLU_EMIS_FILE_DETAILS.vaccine_type,
+                        supplier=FLU_EMIS_FILE_DETAILS.supplier,
+                        created_at_formatted_string=FLU_EMIS_FILE_DETAILS.created_at_formatted_string,
+                    )
        self.assertIn(NON_EXISTENT_QUEUE_ERROR_MESSAGE, str(context.exception))
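
Each SQS test now applies the same pair of patches: `QUEUE_URL` in the environment, which `send_sqs_message` evidently reads to pick the destination queue, and the module-level `sqs_client`. If the nesting grows tiresome, the pair could be wrapped in a small helper; this is only a sketch, and `supplier_queue_patches` is a hypothetical name:

```python
from contextlib import contextmanager
from unittest.mock import patch


@contextmanager
def supplier_queue_patches(sqs_client, queue_url):
    """Point send_sqs_message at a moto-backed queue for the duration of a test."""
    with patch.dict("os.environ", {"QUEUE_URL": queue_url}):
        with patch("send_sqs_message.sqs_client", sqs_client):
            yield


# Usage inside a test method:
#     with supplier_queue_patches(self.sqs_client, self.queue_url):
#         send_to_supplier_queue(message_body=..., vaccine_type="FLU", supplier="EMIS")
```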
