Commit 82cd341

unit tests, mock dynamodb, refactoring
1 parent 228eb92 commit 82cd341

2 files changed: +128 -19

ack_backend/src/update_ack_file.py

27 additions & 10 deletions
@@ -66,6 +66,25 @@ def obtain_current_ack_content(temp_ack_file_key: str) -> StringIO:
     return accumulated_csv_content
 
 
+def handle_ingestion_complete(
+    temp_ack_file_key: str,
+    message_id: str,
+    supplier: str,
+    vaccine_type: str,
+    archive_ack_file_key: str,
+    file_key: str,
+) -> None:
+    """Handles processing where the complete file has been ingested"""
+    move_file(ACK_BUCKET_NAME, temp_ack_file_key, archive_ack_file_key)
+    move_file(SOURCE_BUCKET_NAME, f"processing/{file_key}", f"archive/{file_key}")
+    # Update the audit table and invoke the filename lambda with next file in the queue (if one exists)
+    change_audit_table_status_to_processed(file_key, message_id)
+    supplier_queue = f"{supplier}_{vaccine_type}"
+    next_queued_file_details = get_next_queued_file_details(supplier_queue)
+    if next_queued_file_details:
+        invoke_filename_lambda(next_queued_file_details["filename"], next_queued_file_details["message_id"])
+
+
 @upload_ack_file_logging_decorator
 def upload_ack_file(
     temp_ack_file_key: str,
@@ -89,16 +108,14 @@ def upload_ack_file(
     row_count_destination = get_row_count(ACK_BUCKET_NAME, temp_ack_file_key)
     # TODO: Should we check for > and if so what handling is required
     if row_count_destination == row_count_source:
-        move_file(ACK_BUCKET_NAME, temp_ack_file_key, archive_ack_file_key)
-        move_file(SOURCE_BUCKET_NAME, f"processing/{file_key}", f"archive/{file_key}")
-
-        # Update the audit table and invoke the filename lambda with next file in the queue (if one exists)
-        change_audit_table_status_to_processed(file_key, message_id)
-        supplier_queue = f"{supplier}_{vaccine_type}"
-        next_queued_file_details = get_next_queued_file_details(supplier_queue)
-        if next_queued_file_details:
-            invoke_filename_lambda(next_queued_file_details["filename"], next_queued_file_details["message_id"])
-
+        handle_ingestion_complete(
+            temp_ack_file_key,
+            message_id,
+            supplier,
+            vaccine_type,
+            archive_ack_file_key,
+            file_key,
+        )
     # Ingestion of this file is complete
     result = {
         "message_id": message_id,

ack_backend/tests/test_splunk_logging.py

101 additions & 9 deletions
@@ -5,7 +5,7 @@
 import json
 from io import StringIO
 from contextlib import ExitStack
-from moto import mock_s3
+from moto import mock_s3, mock_dynamodb
 from boto3 import client as boto3_client
 
 from tests.utils.values_for_ack_backend_tests import (
@@ -19,18 +19,21 @@
 from tests.utils.utils_for_ack_backend_tests import generate_event
 
 with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
+    from clients import REGION_NAME
     from ack_processor import lambda_handler
 
 s3_client = boto3_client("s3")
+dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
 
 
-@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
 @mock_s3
+@mock_dynamodb
+@patch.dict("os.environ", MOCK_ENVIRONMENT_DICT)
 class TestLoggingDecorators(unittest.TestCase):
     """Tests for the ack lambda logging decorators"""
 
     def setUp(self):
-        GenericSetUp(s3_client)
+        GenericSetUp(s3_client, None, dynamodb_client)
 
     # MOCK SOURCE FILE WITH 100 ROWS TO SIMULATE THE SCENARIO WHERE THE ACK FILE IS NOT FULL.
     # TODO: Test all other scenarios.
@@ -59,8 +62,8 @@ def run(self, result=None):
             # Any uses of the logger in other files will confound the tests and should be patched here.
             patch("update_ack_file.logger"),
             # Time is incremented by 1.0 for each call to time.time for ease of testing.
-            # Range is set to a large number (100) due to many calls being made to time.time for some tests.
-            patch("logging_decorators.time.time", side_effect=[0.0 + i for i in range(100)]),
+            # Range is set to a large number (300) due to many calls being made to time.time for some tests.
+            patch("logging_decorators.time.time", side_effect=[0.0 + i for i in range(300)]),
         ]
 
         # Set up the ExitStack. Note that patches need to be explicitly started so that they will be applied even when
@@ -85,11 +88,16 @@ def extract_all_call_args_for_logger_error(self, mock_logger):
         """Extracts all arguments for logger.error."""
         return [args[0] for args, _ in mock_logger.error.call_args_list]
 
-    def expected_lambda_handler_logs(self, success: bool, number_of_rows, diagnostics=None):
+    def expected_lambda_handler_logs(self, success: bool, number_of_rows, ingestion_complete=False, diagnostics=None):
         """Returns the expected logs for the lambda handler function."""
         # Mocking of timings is such that the time taken is 2 seconds for each row,
-        # plus 2 seconds for the handler if it succeeds (i.e. it calls update_ack_file) or 1 second if it doesn't
-        time_taken = f"{number_of_rows * 2 + 2}.0s" if success else f"{number_of_rows * 2 + 1}.0s"
+        # plus 2 seconds for the handler if it succeeds (i.e. it calls update_ack_file) or 1 second if it doesn't;
+        # plus an extra second if ingestion is complete
+        if success:
+            time_taken = f"{number_of_rows * 2 + 3}.0s" if ingestion_complete else f"{number_of_rows * 2 + 2}.0s"
+        else:
+            time_taken = f"{number_of_rows * 2 + 1}.0s"
+
         base_log = (
             ValidValues.lambda_handler_success_expected_log
             if success
@@ -139,7 +147,7 @@ def test_splunk_logging_missing_data(self):
         expected_first_logger_info_data = {**InvalidValues.Logging_with_no_values}
 
         expected_first_logger_error_data = self.expected_lambda_handler_logs(
-            success=False, number_of_rows=1, diagnostics="'NoneType' object has no attribute 'replace'"
+            success=False, number_of_rows=1, ingestion_complete=False, diagnostics="'NoneType' object has no attribute 'replace'"
         )
 
         first_logger_info_call_args = json.loads(self.extract_all_call_args_for_logger_info(mock_logger)[0])
@@ -305,6 +313,90 @@ def test_splunk_logging_multiple_with_diagnostics(
             ]
         )
 
+    def test_splunk_update_ack_file_not_logged(self):
+        """Tests that update_ack_file is not logged if we have sent acks for less than the whole file"""
+        self.maxDiff = None
+        # send 98 messages
+        messages = []
+        for i in range(1, 99):
+            message_value = "test" + str(i)
+            messages.append({"row_id": message_value})
+
+        with (
+            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
+            patch("logging_decorators.logger") as mock_logger,
+            patch("update_ack_file.handle_ingestion_complete") as mock_handle_ingestion_complete,  # noqa: E999
+        ):
+            result = lambda_handler(generate_event(messages), context={})
+
+        self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
+
+        expected_secondlast_logger_info_data = {
+            **ValidValues.mock_message_expected_log_value,
+            "message_id": "test98",
+        }
+        expected_last_logger_info_data = self.expected_lambda_handler_logs(success=True, number_of_rows=98)
+
+        all_logger_info_call_args = self.extract_all_call_args_for_logger_info(mock_logger)
+        secondlast_logger_info_call_args = json.loads(all_logger_info_call_args[97])
+        last_logger_info_call_args = json.loads(all_logger_info_call_args[98])
+
+        self.assertEqual(secondlast_logger_info_call_args, expected_secondlast_logger_info_data)
+        self.assertEqual(last_logger_info_call_args, expected_last_logger_info_data)
+
+        mock_send_log_to_firehose.assert_has_calls(
+            [
+                call(secondlast_logger_info_call_args),
+                call(last_logger_info_call_args),
+            ]
+        )
+        mock_handle_ingestion_complete.assert_not_called()
+
+    def test_splunk_update_ack_file_logged(self):
+        """Tests that update_ack_file is logged if we have sent acks for the whole file"""
+        # send 99 messages
+        messages = []
+        for i in range(1, 100):
+            message_value = "test" + str(i)
+            messages.append({"row_id": message_value})
+
+        with (
+            patch("logging_decorators.send_log_to_firehose") as mock_send_log_to_firehose,
+            patch("logging_decorators.logger") as mock_logger,
+            patch("update_ack_file.handle_ingestion_complete") as mock_handle_ingestion_complete,  # noqa: E999
+        ):
+            result = lambda_handler(generate_event(messages), context={})
+
+        self.assertEqual(result, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
+
+        expected_thirdlast_logger_info_data = {
+            **ValidValues.mock_message_expected_log_value,
+            "message_id": "test99",
+        }
+        expected_secondlast_logger_info_data = {
+            **ValidValues.upload_ack_file_expected_log,
+            "message_id": "test1",
+            "time_taken": "1.0s",
+        }
+        expected_last_logger_info_data = self.expected_lambda_handler_logs(
+            success=True, number_of_rows=99, ingestion_complete=True
+        )
+
+        all_logger_info_call_args = self.extract_all_call_args_for_logger_info(mock_logger)
+        thirdlast_logger_info_call_args = json.loads(all_logger_info_call_args[98])
+        secondlast_logger_info_call_args = json.loads(all_logger_info_call_args[99])
+        last_logger_info_call_args = json.loads(all_logger_info_call_args[100])
+
+        self.assertEqual(thirdlast_logger_info_call_args, expected_thirdlast_logger_info_data)
+        self.assertEqual(secondlast_logger_info_call_args, expected_secondlast_logger_info_data)
+        self.assertEqual(last_logger_info_call_args, expected_last_logger_info_data)
+
+        mock_send_log_to_firehose.assert_has_calls(
+            [
+                call(thirdlast_logger_info_call_args),
+                call(secondlast_logger_info_call_args),
+                call(last_logger_info_call_args),
+            ]
+        )
+        mock_handle_ingestion_complete.assert_called()
+
 
 if __name__ == "__main__":
     unittest.main()
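
As a worked example of the timing arithmetic in expected_lambda_handler_logs: with time.time patched to return 0.0, 1.0, 2.0, ..., the mocked clock advances one second per call; per the comments in the file, that works out to 2 mocked seconds per row, plus 2 for a successful handler, plus 1 more when ingestion completes. So 99 rows with ingestion complete gives 99 * 2 + 3 = 201.0s, which is why the side_effect range was enlarged from 100 to 300.

The DynamoDB side of the setup follows moto's standard pattern: the mock_dynamodb decorator intercepts boto3 calls so GenericSetUp can create a real-looking audit table in memory. Below is a minimal sketch of that pattern with a hypothetical table name, key schema, and region; the actual definitions live in GenericSetUp and are not shown in this diff.

from boto3 import client as boto3_client
from moto import mock_dynamodb  # pre-5.0 moto style, matching the import in the diff


@mock_dynamodb
def sketch_audit_table_round_trip():
    """Sketch: create an in-memory audit table and read an item back."""
    dynamodb_client = boto3_client("dynamodb", region_name="eu-west-2")  # assumed region
    dynamodb_client.create_table(
        TableName="audit-table",  # hypothetical name and schema
        KeySchema=[{"AttributeName": "message_id", "KeyType": "HASH"}],
        AttributeDefinitions=[{"AttributeName": "message_id", "AttributeType": "S"}],
        BillingMode="PAY_PER_REQUEST",
    )
    dynamodb_client.put_item(
        TableName="audit-table",
        Item={"message_id": {"S": "msg-1"}, "status": {"S": "Processed"}},
    )
    # Code under test sees this table exactly as if it were real DynamoDB.
    item = dynamodb_client.get_item(TableName="audit-table", Key={"message_id": {"S": "msg-1"}})
    assert item["Item"]["status"]["S"] == "Processed"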
