Skip to content

Commit 54b6675

Browse files
committed
updated unit tests for recordprocessor
1 parent 53866ca commit 54b6675

File tree

6 files changed

+285
-5
lines changed

6 files changed

+285
-5
lines changed

recordprocessor/src/send_to_kinesis.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ def send_to_kinesis(supplier: str, message_body: dict, vaccine_type: str) -> boo
1515
Data=json.dumps(message_body, ensure_ascii=False),
1616
PartitionKey=f"{supplier}_{vaccine_type}",
1717
)
18+
return True
1819
except ClientError as error:
1920
logger.error("Error sending message to Kinesis: %s", error)
2021
raise

recordprocessor/tests/test_make_and_upload_ack_file.py

Lines changed: 84 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,31 @@
22

33
import unittest
44
from unittest.mock import patch
5-
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT
5+
from copy import deepcopy
6+
from boto3 import client as boto3_client
7+
from moto import mock_s3
8+
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT, BucketNames
9+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import get_csv_file_dict_reader
10+
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import MockFileDetails
611

712
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
8-
from make_and_upload_ack_file import make_ack_data
13+
from src.make_and_upload_ack_file import make_ack_data, upload_ack_file, make_and_upload_ack_file
14+
from clients import REGION_NAME
915

16+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import GenericSetUp, GenericTearDown
1017

18+
s3_client = boto3_client("s3", region_name=REGION_NAME)
19+
20+
FILE_DETAILS = MockFileDetails.flu_emis
21+
22+
@mock_s3
23+
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
1124
class TestMakeAndUploadAckFile(unittest.TestCase):
1225
"Tests for make_and_upload_ack_file functions"
1326

1427
def setUp(self) -> None:
1528
self.message_id = "test_id"
16-
self.created_at_formatted_string = "20200101T12345600"
29+
self.created_at_formatted_string = "20211120T12000000"
1730
self.ack_data_validation_passed_and_message_delivered = {
1831
"MESSAGE_HEADER_ID": self.message_id,
1932
"HEADER_RESPONSE_CODE": "Success",
@@ -57,6 +70,11 @@ def setUp(self) -> None:
5770
"MESSAGE_DELIVERY": False,
5871
}
5972

73+
GenericSetUp(s3_client)
74+
75+
def tearDown(self):
76+
GenericTearDown(s3_client)
77+
6078
def test_make_ack_data(self):
6179
"Tests make_ack_data makes correct ack data based on the input args"
6280
# Test case tuples are structured as (validation_passed, message_delivered, expected_result)
@@ -73,8 +91,69 @@ def test_make_ack_data(self):
7391
make_ack_data(
7492
self.message_id, validation_passed, message_delivered, self.created_at_formatted_string
7593
),
76-
expected_result,
77-
)
94+
expected_result,
95+
)
96+
97+
def test_upload_ack_file_success(self):
98+
"""Test that upload_ack_file successfully uploads the ack file"""
99+
100+
upload_ack_file(
101+
file_key=FILE_DETAILS.file_key,
102+
ack_data=deepcopy(self.ack_data_validation_passed_and_message_delivered),
103+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
104+
)
105+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_delivered)]
106+
# Note that the data downloaded from the CSV will contain the bool as a string
107+
expected_result[0]["MESSAGE_DELIVERY"] = "True"
108+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
109+
self.assertEqual(list(csv_dict_reader), expected_result)
110+
111+
def test_upload_ack_file_failure(self):
112+
"""Test that upload_ack_file failed to upload the ack file"""
113+
114+
upload_ack_file(
115+
file_key=FILE_DETAILS.file_key,
116+
ack_data=deepcopy(self.ack_data_validation_passed_and_message_not_delivered),
117+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
118+
)
119+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_not_delivered)]
120+
# Note that the data downloaded from the CSV will contain the bool as a string
121+
expected_result[0]["MESSAGE_DELIVERY"] = "False"
122+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
123+
self.assertEqual(list(csv_dict_reader), expected_result)
124+
125+
def test_make_and_upload_ack_file_success(self):
126+
"""Test that make_and_upload_ack_file uploads an ack file containing the correct values"""
127+
make_and_upload_ack_file(
128+
message_id=self.message_id,
129+
file_key=FILE_DETAILS.file_key,
130+
validation_passed=True,
131+
message_delivered=True,
132+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
133+
)
134+
135+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_delivered)]
136+
# Note that the data downloaded from the CSV will contain the bool as a string
137+
expected_result[0]["MESSAGE_DELIVERY"] = "True"
138+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
139+
self.assertEqual(list(csv_dict_reader), expected_result)
140+
141+
142+
def test_make_and_upload_ack_file_failure(self):
143+
"""Test that make_and_upload_ack_file failed to upload an ack file containing the correct values"""
144+
make_and_upload_ack_file(
145+
message_id=self.message_id,
146+
file_key=FILE_DETAILS.file_key,
147+
validation_passed=True,
148+
message_delivered=False,
149+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
150+
)
151+
152+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_not_delivered)]
153+
# Note that the data downloaded from the CSV will contain the bool as a string
154+
expected_result[0]["MESSAGE_DELIVERY"] = "False"
155+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
156+
self.assertEqual(list(csv_dict_reader), expected_result)
78157

79158

80159
if __name__ == "__main__":
Lines changed: 127 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,127 @@
1+
"""Tests for the process_row module"""
2+
3+
import unittest
4+
from unittest.mock import patch
5+
from copy import deepcopy
6+
from boto3 import client as boto3_client
7+
from moto import mock_s3
8+
from decimal import Decimal
9+
10+
11+
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
12+
MockFieldDictionaries,
13+
MockFileRows,
14+
)
15+
16+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
17+
GenericSetUp,
18+
GenericTearDown,
19+
)
20+
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT
21+
22+
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
23+
# Do not attempt 'from src.mappings import Vaccine' as this imports a different instance of Vaccine
24+
# and tests will break
25+
from mappings import Vaccine
26+
from clients import REGION_NAME
27+
from src.process_row import process_row
28+
29+
s3_client = boto3_client("s3", region_name=REGION_NAME)
30+
ROW_DETAILS = MockFieldDictionaries.all_fields
31+
Allowed_Operations = {'CREATE', 'UPDATE', 'DELETE'}
32+
33+
@mock_s3
34+
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
35+
class TestProcessRow(unittest.TestCase):
36+
"""Tests for process_row"""
37+
38+
def setUp(self) -> None:
39+
GenericSetUp(s3_client)
40+
41+
def tearDown(self) -> None:
42+
GenericTearDown(s3_client)
43+
44+
def test_process_row_success(self):
45+
"""
46+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
47+
"""
48+
# set the expected output from 'process_row' in case of success
49+
expected_result = {'resourceType': 'Immunization', 'status': 'completed', 'protocolApplied': [{'targetDisease': [], 'doseNumberPositiveInt': 1}], 'reasonCode': [{'coding': [{'system': 'http://snomed.info/sct', 'code': '1037351000000105'}]}], 'recorded': '2024-09-04', 'identifier': [{'value': 'RSV_002', 'system': 'https://www.ravs.england.nhs.uk/'}], 'patient': {'reference': '#Patient1'}, 'contained': [{'id': 'Patient1', 'resourceType': 'Patient', 'birthDate': '2008-02-17', 'gender': 'male', 'address': [{'postalCode': 'WD25 0DZ'}], 'identifier': [{'system': 'https://fhir.nhs.uk/Id/nhs-number', 'value': '9732928395'}], 'name': [{'family': 'PEEL', 'given': ['PHYLIS']}]}, {'resourceType': 'Practitioner', 'id': 'Practitioner1', 'name': [{'family': "O'Reilly", 'given': ['Ellena']}]}], 'vaccineCode': {'coding': [{'system': 'http://snomed.info/sct', 'code': '42223111000001107', 'display': 'Quadrivalent influenza vaccine (split virion, inactivated)'}]}, 'manufacturer': {'display': 'Sanofi Pasteur'}, 'expirationDate': '2024-09-15', 'lotNumber': 'BN92478105653', 'extension': [{'url': 'https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure', 'valueCodeableConcept': {'coding': [{'system': 'http://snomed.info/sct', 'code': '956951000000104', 'display': 'RSV vaccination in pregnancy (procedure)'}]}}], 'occurrenceDateTime': '2024-09-04T18:33:25+00:00', 'primarySource': True, 'site': {'coding': [{'system': 'http://snomed.info/sct', 'code': '368209003', 'display': 'Right arm'}]}, 'route': {'coding': [{'system': 'http://snomed.info/sct', 'code': '1210999013', 'display': 'Intradermal use'}]}, 'doseQuantity': {'value': Decimal('0.3'), 'unit': 'Inhalation - unit of product usage', 'system': 'http://snomed.info/sct', 'code': '2622896019'}, 'performer': [{'actor': {'type': 'Organization', 'identifier': {'system': 'https://fhir.nhs.uk/Id/ods-organization-code', 'value': 'RVVKC'}}}, {'actor': {'reference': '#Practitioner1'}}], 'location': {'identifier': {'value': 
'RJC02', 'system': 'https://fhir.nhs.uk/Id/ods-organization-code'}}}
50+
51+
# call 'process_row' with required details
52+
imms_fhir_resource = process_row("EMIS", Allowed_Operations, ROW_DETAILS)
53+
# validate if the response with expected result
54+
self.assertDictEqual(imms_fhir_resource["fhir_json"], expected_result)
55+
56+
def test_process_row_invalid_action_flag(self):
57+
"""
58+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
59+
"""
60+
Mock_Row = deepcopy(ROW_DETAILS)
61+
# setting up the invalid action flag other than 'NEW', 'UPDATE' or 'DELETE'
62+
Mock_Row['ACTION_FLAG'] = 'Invalid'
63+
64+
# call 'process_row' with required details
65+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
66+
67+
# validate if we got INVALID_ACTION_FLAG in response
68+
self.assertEqual(response['diagnostics']['error_type'], 'INVALID_ACTION_FLAG')
69+
70+
def test_process_row_missing_action_flag(self):
71+
"""
72+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
73+
"""
74+
75+
Mock_Row = deepcopy(ROW_DETAILS)
76+
# removing action flag from row
77+
Mock_Row.pop('ACTION_FLAG')
78+
79+
# call 'process_row' with required details
80+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
81+
# validate if we got INVALID_ACTION_FLAG in response
82+
self.assertEqual(response['diagnostics']['error_type'], 'INVALID_ACTION_FLAG')
83+
84+
def test_process_row_missing_permission(self):
85+
"""
86+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
87+
"""
88+
# only create and delete permission. Missing update
89+
allowed_operation = {'CREATE', 'DELETE'}
90+
# copy row data with Action_Flag = 'Update'
91+
Mock_Row = deepcopy(ROW_DETAILS)
92+
93+
# call 'process_row' with required details
94+
response = process_row("EMIS", allowed_operation, Mock_Row)
95+
96+
self.assertEqual(response['diagnostics']['error_type'], 'NO_PERMISSIONS')
97+
self.assertEqual(response['diagnostics']['statusCode'], 403)
98+
99+
def test_process_row_missing_unique_id(self):
100+
"""
101+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
102+
"""
103+
# copy row data and remove 'UNIQUE_ID'
104+
Mock_Row = deepcopy(ROW_DETAILS)
105+
Mock_Row.pop('UNIQUE_ID')
106+
# call 'process_row' with required details
107+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
108+
109+
self.assertEqual(response['diagnostics']['error_type'], 'MISSING_UNIQUE_ID')
110+
self.assertEqual(response['diagnostics']['statusCode'], 400)
111+
112+
def test_process_row_missing_unique_id_uri(self):
113+
"""
114+
Test that process_row gives the expected output. These tests check that the row is valid and matches the expected output.
115+
"""
116+
# copy row data and remove 'UNIQUE_ID_URI'
117+
Mock_Row = deepcopy(ROW_DETAILS)
118+
Mock_Row.pop('UNIQUE_ID_URI')
119+
# call 'process_row' with required details
120+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
121+
122+
self.assertEqual(response['diagnostics']['error_message'], 'UNIQUE_ID or UNIQUE_ID_URI is missing')
123+
self.assertEqual(response['diagnostics']['statusCode'], 400)
124+
125+
if __name__ == '__main__':
126+
unittest.main()
127+
Lines changed: 46 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,46 @@
1+
"""Tests for the send_to_kinesis module"""

import unittest
from unittest.mock import patch

from boto3 import client as boto3_client
from moto import mock_kinesis

from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
    GenericSetUp,
    GenericTearDown,
)
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
    REGION_NAME,
)
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT

# Import the module under test with the environment already patched so that any
# module-level configuration it reads comes from the mock values. (A second,
# unpatched top-level import of send_to_kinesis would defeat this and has been removed.)
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
    from src.send_to_kinesis import send_to_kinesis

kinesis_client = boto3_client("kinesis", region_name=REGION_NAME)


@mock_kinesis
class TestSendToKinesis(unittest.TestCase):
    """Tests for send_to_kinesis"""

    def setUp(self) -> None:
        GenericSetUp(None, None, kinesis_client)

    def tearDown(self) -> None:
        GenericTearDown(None, None, kinesis_client)

    @patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
    def test_send_to_kinesis_success(self):
        """send_to_kinesis returns True when the record is accepted by the (moto-mocked) stream."""
        # NOTE: kinesis_client is a real boto3 client backed by moto, not a Mock --
        # assigning a .return_value to it would be a silent no-op, so no stubbing is done here.
        supplier = "test_supplier"
        message_body = {"key": "value"}
        vaccine_type = "test_vaccine"

        result = send_to_kinesis(supplier, message_body, vaccine_type)

        self.assertTrue(result)


if __name__ == "__main__":
    unittest.main()
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
"""Tests for the utils_for_recordprocessor module"""
2+
3+
import unittest
4+
5+
from src.unique_permission import get_unique_action_flags_from_s3
6+
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import MockFileDetails, ValidMockFileContent
7+
8+
class TestGetUniqueFlagsFromS3(unittest.TestCase):
9+
def test_get_unique_action_flags_from_s3(self):
10+
csv_data = ValidMockFileContent.with_new_and_update_and_delete
11+
expected_output = {"NEW", "UPDATE", "DELETE"}
12+
13+
result = get_unique_action_flags_from_s3(csv_data)
14+
15+
self.assertEqual(result, expected_output)
16+
17+
18+
19+
if __name__ == '__main__':
20+
unittest.main()
21+

recordprocessor/tests/utils_for_recordprocessor_tests/utils_for_recordprocessor_tests.py

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -10,6 +10,12 @@ def convert_string_to_dict_reader(data_string: str):
1010
"""Take a data string and convert it to a csv DictReader"""
1111
return DictReader(StringIO(data_string), delimiter="|")
1212

13+
def get_csv_file_dict_reader(s3_client, bucket_name: str, file_key: str) -> DictReader:
    """Fetch `file_key` from `bucket_name` via `s3_client` and return its contents as a pipe-delimited DictReader."""
    response = s3_client.get_object(Bucket=bucket_name, Key=file_key)
    decoded_body = response["Body"].read().decode("utf-8")
    return DictReader(StringIO(decoded_body), delimiter="|")
18+
1319

1420
class GenericSetUp:
1521
"""

0 commit comments

Comments
 (0)