Skip to content

Commit 7300329

Browse files
authored
Merge pull request #347 from NHSDigital/AMB-2289-extend-unit-test
updated unit tests for recordprocessor
2 parents 647f386 + f65c83a commit 7300329

File tree

9 files changed

+362
-5
lines changed

9 files changed

+362
-5
lines changed

recordprocessor/src/convert_to_fhir_imms_resource.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
""""Decorators to add the relevant fields to the FHIR immunization resource from the batch stream"""
1+
""" "Decorators to add the relevant fields to the FHIR immunization resource from the batch stream"""
22

33
from typing import List, Callable, Dict
44
from utils_for_fhir_conversion import _is_not_empty, Generate, Add, Convert

recordprocessor/src/models/utils.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -43,12 +43,14 @@ def to_operation_outcome(self) -> dict:
4343
@dataclass
4444
class ImmunizationApiUnhandledError(RuntimeError):
4545
"""An error that occurs when the ImmunizationApi throws an unhandled error."""
46+
4647
request: dict
4748

4849

4950
@dataclass
5051
class ImmunizationApiError(RuntimeError):
5152
"""An error that occurs when the ImmunizationApi returns a non-200 status code."""
53+
5254
status_code: int
5355
request: dict
5456
response: Union[dict, str]

recordprocessor/src/send_to_kinesis.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,7 @@ def send_to_kinesis(supplier: str, message_body: dict, vaccine_type: str) -> bool:
1515
Data=json.dumps(message_body, ensure_ascii=False),
1616
PartitionKey=f"{supplier}_{vaccine_type}",
1717
)
18+
return True
1819
except ClientError as error:
1920
logger.error("Error sending message to Kinesis: %s", error)
2021
raise

recordprocessor/src/unique_permission.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,7 @@ def get_unique_action_flags_from_s3(csv_data):
77
Reads the CSV file from an S3 bucket and returns a set of unique ACTION_FLAG values.
88
"""
99
# Load content into a pandas DataFrame
10-
df = pd.read_csv(StringIO(csv_data), delimiter='|', usecols=["ACTION_FLAG"])
10+
df = pd.read_csv(StringIO(csv_data), delimiter="|", usecols=["ACTION_FLAG"])
1111
# Get unique ACTION_FLAG values in one step
1212
unique_action_flags = set(df["ACTION_FLAG"].str.upper().unique())
1313
return unique_action_flags

recordprocessor/tests/test_make_and_upload_ack_file.py

Lines changed: 82 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,32 @@
22

33
import unittest
44
from unittest.mock import patch
5-
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT
5+
from copy import deepcopy
6+
from boto3 import client as boto3_client
7+
from moto import mock_s3
8+
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT, BucketNames
9+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import get_csv_file_dict_reader
10+
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import MockFileDetails
611

712
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
8-
from make_and_upload_ack_file import make_ack_data
13+
from src.make_and_upload_ack_file import make_ack_data, upload_ack_file, make_and_upload_ack_file
14+
from clients import REGION_NAME
915

16+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import GenericSetUp, GenericTearDown
1017

18+
s3_client = boto3_client("s3", region_name=REGION_NAME)
19+
20+
FILE_DETAILS = MockFileDetails.flu_emis
21+
22+
23+
@mock_s3
24+
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
1125
class TestMakeAndUploadAckFile(unittest.TestCase):
1226
"Tests for make_and_upload_ack_file functions"
1327

1428
def setUp(self) -> None:
1529
self.message_id = "test_id"
16-
self.created_at_formatted_string = "20200101T12345600"
30+
self.created_at_formatted_string = "20211120T12000000"
1731
self.ack_data_validation_passed_and_message_delivered = {
1832
"MESSAGE_HEADER_ID": self.message_id,
1933
"HEADER_RESPONSE_CODE": "Success",
@@ -57,6 +71,11 @@ def setUp(self) -> None:
5771
"MESSAGE_DELIVERY": False,
5872
}
5973

74+
GenericSetUp(s3_client)
75+
76+
def tearDown(self):
77+
GenericTearDown(s3_client)
78+
6079
def test_make_ack_data(self):
6180
"Tests make_ack_data makes correct ack data based on the input args"
6281
# Test case tuples are structured as (validation_passed, message_delivered, expected_result)
@@ -76,6 +95,66 @@ def test_make_ack_data(self):
7695
expected_result,
7796
)
7897

98+
def test_upload_ack_file_success(self):
99+
"""Test that upload_ack_file successfully uploads the ack file"""
100+
101+
upload_ack_file(
102+
file_key=FILE_DETAILS.file_key,
103+
ack_data=deepcopy(self.ack_data_validation_passed_and_message_delivered),
104+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
105+
)
106+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_delivered)]
107+
# Note that the data downloaded from the CSV will contain the bool as a string
108+
expected_result[0]["MESSAGE_DELIVERY"] = "True"
109+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
110+
self.assertEqual(list(csv_dict_reader), expected_result)
111+
112+
def test_upload_ack_file_failure(self):
113+
"""Test that upload_ack_file failed to upload the ack file"""
114+
115+
upload_ack_file(
116+
file_key=FILE_DETAILS.file_key,
117+
ack_data=deepcopy(self.ack_data_validation_passed_and_message_not_delivered),
118+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
119+
)
120+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_not_delivered)]
121+
# Note that the data downloaded from the CSV will contain the bool as a string
122+
expected_result[0]["MESSAGE_DELIVERY"] = "False"
123+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
124+
self.assertEqual(list(csv_dict_reader), expected_result)
125+
126+
def test_make_and_upload_ack_file_success(self):
127+
"""Test that make_and_upload_ack_file uploads an ack file containing the correct values"""
128+
make_and_upload_ack_file(
129+
message_id=self.message_id,
130+
file_key=FILE_DETAILS.file_key,
131+
validation_passed=True,
132+
message_delivered=True,
133+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
134+
)
135+
136+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_delivered)]
137+
# Note that the data downloaded from the CSV will contain the bool as a string
138+
expected_result[0]["MESSAGE_DELIVERY"] = "True"
139+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
140+
self.assertEqual(list(csv_dict_reader), expected_result)
141+
142+
def test_make_and_upload_ack_file_failure(self):
143+
"""Test that make_and_upload_ack_file failed to upload an ack file containing the correct values"""
144+
make_and_upload_ack_file(
145+
message_id=self.message_id,
146+
file_key=FILE_DETAILS.file_key,
147+
validation_passed=True,
148+
message_delivered=False,
149+
created_at_formatted_string=FILE_DETAILS.created_at_formatted_string,
150+
)
151+
152+
expected_result = [deepcopy(self.ack_data_validation_passed_and_message_not_delivered)]
153+
# Note that the data downloaded from the CSV will contain the bool as a string
154+
expected_result[0]["MESSAGE_DELIVERY"] = "False"
155+
csv_dict_reader = get_csv_file_dict_reader(s3_client, BucketNames.DESTINATION, FILE_DETAILS.inf_ack_file_key)
156+
self.assertEqual(list(csv_dict_reader), expected_result)
157+
79158

80159
if __name__ == "__main__":
81160
unittest.main()
Lines changed: 203 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,203 @@
1+
"""Tests for the process_row module"""
2+
3+
import unittest
4+
from unittest.mock import patch
5+
from copy import deepcopy
6+
from boto3 import client as boto3_client
7+
from moto import mock_s3
8+
from decimal import Decimal
9+
10+
11+
from tests.utils_for_recordprocessor_tests.values_for_recordprocessor_tests import (
12+
MockFieldDictionaries,
13+
)
14+
15+
from tests.utils_for_recordprocessor_tests.utils_for_recordprocessor_tests import (
16+
GenericSetUp,
17+
GenericTearDown,
18+
)
19+
from tests.utils_for_recordprocessor_tests.mock_environment_variables import MOCK_ENVIRONMENT_DICT
20+
21+
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
22+
# Do not attempt 'from src.mappings import Vaccine' as this imports a different instance of Vaccine
23+
# and tests will break
24+
from clients import REGION_NAME
25+
from src.process_row import process_row
26+
27+
s3_client = boto3_client("s3", region_name=REGION_NAME)
28+
ROW_DETAILS = MockFieldDictionaries.all_fields
29+
Allowed_Operations = {"CREATE", "UPDATE", "DELETE"}
30+
31+
32+
@mock_s3
33+
@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
34+
class TestProcessRow(unittest.TestCase):
35+
"""Tests for process_row"""
36+
37+
def setUp(self) -> None:
38+
GenericSetUp(s3_client)
39+
40+
def tearDown(self) -> None:
41+
GenericTearDown(s3_client)
42+
43+
def test_process_row_success(self):
44+
"""
45+
Test that process_row gives the expected output.
46+
These tests check that the row is valid and matches the expected output.
47+
"""
48+
# set the expected output from 'process_row' in case of success
49+
expected_result = {
50+
"resourceType": "Immunization",
51+
"status": "completed",
52+
"protocolApplied": [{"targetDisease": [], "doseNumberPositiveInt": 1}],
53+
"reasonCode": [{"coding": [{"system": "http://snomed.info/sct", "code": "1037351000000105"}]}],
54+
"recorded": "2024-09-04",
55+
"identifier": [{"value": "RSV_002", "system": "https://www.ravs.england.nhs.uk/"}],
56+
"patient": {"reference": "#Patient1"},
57+
"contained": [
58+
{
59+
"id": "Patient1",
60+
"resourceType": "Patient",
61+
"birthDate": "2008-02-17",
62+
"gender": "male",
63+
"address": [{"postalCode": "WD25 0DZ"}],
64+
"identifier": [{"system": "https://fhir.nhs.uk/Id/nhs-number", "value": "9732928395"}],
65+
"name": [{"family": "PEEL", "given": ["PHYLIS"]}],
66+
},
67+
{
68+
"resourceType": "Practitioner",
69+
"id": "Practitioner1",
70+
"name": [{"family": "O'Reilly", "given": ["Ellena"]}],
71+
},
72+
],
73+
"vaccineCode": {
74+
"coding": [
75+
{
76+
"system": "http://snomed.info/sct",
77+
"code": "42223111000001107",
78+
"display": "Quadrivalent influenza vaccine (split virion, inactivated)",
79+
}
80+
]
81+
},
82+
"manufacturer": {"display": "Sanofi Pasteur"},
83+
"expirationDate": "2024-09-15",
84+
"lotNumber": "BN92478105653",
85+
"extension": [
86+
{
87+
"url": "https://fhir.hl7.org.uk/StructureDefinition/Extension-UKCore-VaccinationProcedure",
88+
"valueCodeableConcept": {
89+
"coding": [
90+
{
91+
"system": "http://snomed.info/sct",
92+
"code": "956951000000104",
93+
"display": "RSV vaccination in pregnancy (procedure)",
94+
}
95+
]
96+
},
97+
}
98+
],
99+
"occurrenceDateTime": "2024-09-04T18:33:25+00:00",
100+
"primarySource": True,
101+
"site": {"coding": [{"system": "http://snomed.info/sct", "code": "368209003", "display": "Right arm"}]},
102+
"route": {
103+
"coding": [{"system": "http://snomed.info/sct", "code": "1210999013", "display": "Intradermal use"}]
104+
},
105+
"doseQuantity": {
106+
"value": Decimal("0.3"),
107+
"unit": "Inhalation - unit of product usage",
108+
"system": "http://snomed.info/sct",
109+
"code": "2622896019",
110+
},
111+
"performer": [
112+
{
113+
"actor": {
114+
"type": "Organization",
115+
"identifier": {"system": "https://fhir.nhs.uk/Id/ods-organization-code", "value": "RVVKC"},
116+
}
117+
},
118+
{"actor": {"reference": "#Practitioner1"}},
119+
],
120+
"location": {"identifier": {"value": "RJC02", "system": "https://fhir.nhs.uk/Id/ods-organization-code"}},
121+
}
122+
123+
# call 'process_row' with required details
124+
imms_fhir_resource = process_row("EMIS", Allowed_Operations, ROW_DETAILS)
125+
# validate if the response with expected result
126+
self.assertDictEqual(imms_fhir_resource["fhir_json"], expected_result)
127+
128+
def test_process_row_invalid_action_flag(self):
129+
"""
130+
Test that process_row gives the expected output.
131+
These tests check that the row is valid and matches the expected output.
132+
"""
133+
Mock_Row = deepcopy(ROW_DETAILS)
134+
# setting up the invalid action flag other than 'NEW', 'UPDATE' or 'DELETE'
135+
Mock_Row["ACTION_FLAG"] = "Invalid"
136+
137+
# call 'process_row' with required details
138+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
139+
140+
# validate if we got INVALID_ACTION_FLAG in response
141+
self.assertEqual(response["diagnostics"]["error_type"], "INVALID_ACTION_FLAG")
142+
143+
def test_process_row_missing_action_flag(self):
144+
"""
145+
Test that process_row gives the expected output.
146+
These tests check that the row is valid and matches the expected output.
147+
"""
148+
149+
Mock_Row = deepcopy(ROW_DETAILS)
150+
# removing action flag from row
151+
Mock_Row.pop("ACTION_FLAG")
152+
153+
# call 'process_row' with required details
154+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
155+
# validate if we got INVALID_ACTION_FLAG in response
156+
self.assertEqual(response["diagnostics"]["error_type"], "INVALID_ACTION_FLAG")
157+
158+
def test_process_row_missing_permission(self):
159+
"""
160+
Test that process_row gives the expected output.
161+
These tests check that the row is valid and matches the expected output.
162+
"""
163+
# only create and delete permission. Missing update
164+
allowed_operation = {"CREATE", "DELETE"}
165+
# copy row data with Action_Flag = 'Update'
166+
Mock_Row = deepcopy(ROW_DETAILS)
167+
168+
# call 'process_row' with required details
169+
response = process_row("EMIS", allowed_operation, Mock_Row)
170+
self.assertEqual(response["diagnostics"]["error_type"], "NO_PERMISSIONS")
171+
self.assertEqual(response["diagnostics"]["statusCode"], 403)
172+
173+
def test_process_row_missing_unique_id(self):
174+
"""
175+
Test that process_row gives the expected output.
176+
These tests check that the row is valid and matches the expected output.
177+
"""
178+
# copy row data and remove 'UNIQUE_ID'
179+
Mock_Row = deepcopy(ROW_DETAILS)
180+
Mock_Row.pop("UNIQUE_ID")
181+
# call 'process_row' with required details
182+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
183+
184+
self.assertEqual(response["diagnostics"]["error_type"], "MISSING_UNIQUE_ID")
185+
self.assertEqual(response["diagnostics"]["statusCode"], 400)
186+
187+
def test_process_row_missing_unique_id_uri(self):
188+
"""
189+
Test that process_row gives the expected output.
190+
These tests check that the row is valid and matches the expected output.
191+
"""
192+
# copy row data and remove 'UNIQUE_ID_URI'
193+
Mock_Row = deepcopy(ROW_DETAILS)
194+
Mock_Row.pop("UNIQUE_ID_URI")
195+
# call 'process_row' with required details
196+
response = process_row("EMIS", Allowed_Operations, Mock_Row)
197+
198+
self.assertEqual(response["diagnostics"]["error_message"], "UNIQUE_ID or UNIQUE_ID_URI is missing")
199+
self.assertEqual(response["diagnostics"]["statusCode"], 400)
200+
201+
202+
if __name__ == "__main__":
203+
unittest.main()

0 commit comments

Comments
 (0)