Skip to content

Commit 2fca6b7

Browse files
committed
test code review
1 parent e9bac32 commit 2fca6b7

File tree

2 files changed

+63
-26
lines changed

2 files changed

+63
-26
lines changed

filenameprocessor/src/file_name_processor.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,7 +13,7 @@
1313
from send_sqs_message import make_and_send_sqs_message
1414
from make_and_upload_ack_file import make_and_upload_the_ack_file
1515
from audit_table import upsert_audit_table, get_next_queued_file_details, ensure_file_is_not_a_duplicate
16-
from clients import logger
16+
from clients import logger, s3_client
1717
from logging_decorator import logging_decorator
1818
from supplier_permissions import validate_vaccine_type_permissions
1919
from errors import (
@@ -121,7 +121,7 @@ def handle_record(record) -> dict:
121121
make_and_upload_the_ack_file(message_id, file_key, message_delivered, created_at_formatted_string)
122122

123123
# Move file to archive
124-
move_file(bucket_name, file_key, f"archive/{file_key}")
124+
move_file(s3_client, bucket_name, file_key, f"archive/{file_key}")
125125

126126
# If there is another file waiting in the queue, invoke the filename lambda with the next file
127127
next_queued_file_details = get_next_queued_file_details(queue_name=f"{supplier}_{vaccine_type}")

filenameprocessor/tests/test_logging_decorator.py

Lines changed: 61 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
"""Tests for the logging_decorator and its helper functions"""
22

33
import unittest
4-
from unittest.mock import patch
4+
from unittest.mock import patch, MagicMock
5+
from datetime import datetime
56
import json
67
from copy import deepcopy
78
from contextlib import ExitStack
8-
from boto3 import client as boto3_client
9+
import boto3
910
from botocore.exceptions import ClientError
1011
from moto import mock_s3, mock_firehose, mock_sqs, mock_dynamodb
1112

@@ -21,18 +22,11 @@
2122
from logging_decorator import send_log_to_firehose, generate_and_send_logs
2223
from constants import PERMISSIONS_CONFIG_FILE_KEY
2324

24-
s3_client = boto3_client("s3", region_name=REGION_NAME)
25-
sqs_client = boto3_client("sqs", region_name=REGION_NAME)
26-
firehose_client = boto3_client("firehose", region_name=REGION_NAME)
27-
dynamodb_client = boto3_client("dynamodb", region_name=REGION_NAME)
2825

2926
FILE_DETAILS = MockFileDetails.emis_flu
3027
MOCK_VACCINATION_EVENT = {
3128
"Records": [{"s3": {"bucket": {"name": BucketNames.SOURCE}, "object": {"key": FILE_DETAILS.file_key}}}]
3229
}
33-
MOCK_CONFIG_EVENT = {
34-
"Records": [{"s3": {"bucket": {"name": BucketNames.CONFIG}, "object": {"key": PERMISSIONS_CONFIG_FILE_KEY}}}]
35-
}
3630

3731

3832
@mock_s3
@@ -45,12 +39,28 @@ class TestLoggingDecorator(unittest.TestCase):
4539

4640
def setUp(self):
4741
"""Set up the mock AWS environment and upload a valid FLU/EMIS file example"""
48-
GenericSetUp(s3_client, firehose_client, sqs_client, dynamodb_client)
49-
s3_client.put_object(Bucket=BucketNames.SOURCE, Key=FILE_DETAILS.file_key)
42+
self.s3_client = boto3.client("s3", region_name=REGION_NAME)
43+
self.sqs_client = boto3.client("sqs", region_name=REGION_NAME)
44+
self.firehose_client = boto3.client("firehose", region_name=REGION_NAME)
45+
self.dynamodb_client = boto3.client("dynamodb", region_name=REGION_NAME)
46+
47+
GenericSetUp(
48+
s3_client=self.s3_client,
49+
firehose_client=self.firehose_client,
50+
sqs_client=self.sqs_client,
51+
dynamodb_client=self.dynamodb_client
52+
)
53+
54+
self.s3_client.put_object(Bucket=BucketNames.SOURCE, Key=FILE_DETAILS.file_key)
5055

5156
def tearDown(self):
5257
"""Clean the mock AWS environment"""
53-
GenericTearDown(s3_client, firehose_client, sqs_client, dynamodb_client)
58+
GenericTearDown(
59+
s3_client=self.s3_client,
60+
firehose_client=self.firehose_client,
61+
sqs_client=self.sqs_client,
62+
dynamodb_client=self.dynamodb_client
63+
)
5464

5565
def run(self, result=None):
5666
"""
@@ -143,17 +153,40 @@ def test_generate_and_send_logs(self):
143153
self.assertEqual(log_data, expected_log_data)
144154
mock_send_log_to_firehose.assert_called_once_with(expected_log_data)
145155

156+
146157
def test_logging_successful_validation(self):
147158
"""Tests that the correct logs are sent to cloudwatch and splunk when file validation is successful"""
148-
# Mock full permissions so that validation will pass
149159
permissions_config_content = generate_permissions_config_content(deepcopy(FILE_DETAILS.permissions_config))
150-
with ( # noqa: E999
151-
patch("file_name_processor.uuid4", return_value=FILE_DETAILS.message_id), # noqa: E999
152-
patch("elasticache.redis_client.get", return_value=permissions_config_content), # noqa: E999
153-
patch("logging_decorator.send_log_to_firehose") as mock_send_log_to_firehose, # noqa: E999
154-
patch("logging_decorator.logger") as mock_logger, # noqa: E999
155-
): # noqa: E999
156-
lambda_handler(MOCK_VACCINATION_EVENT, context=None)
160+
161+
# Mock Redis
162+
mock_redis_client = MagicMock()
163+
mock_redis_client.get.return_value = permissions_config_content
164+
mock_redis_client.hget.return_value = json.dumps(FILE_DETAILS.permissions_config["EMIS"])
165+
166+
fixed_datetime = datetime(2023, 1, 1, 12, 0, 0)
167+
fixed_time = 1000000.0
168+
169+
# Mock DynamoDB
170+
mock_table = MagicMock()
171+
mock_table.query.return_value = {"Items": []}
172+
self.dynamodb_client = MagicMock()
173+
self.dynamodb_client.Table.return_value = mock_table
174+
175+
mock_sqs_client = MagicMock()
176+
mock_sqs_client.send_message.return_value = {"MessageId": "mock-message-id"}
177+
178+
mock_dynamodb_resource = MagicMock()
179+
mock_dynamodb_resource.Table.return_value = mock_table
180+
181+
with (
182+
patch("logging_decorator.datetime") as mock_datetime,
183+
patch("logging_decorator.time") as mock_time,
184+
patch("logging_decorator.logger") as mock_logger,
185+
patch("logging_decorator.send_log_to_firehose") as mock_firehose,
186+
):
187+
mock_datetime.now.return_value = fixed_datetime
188+
mock_time.time.side_effect = [fixed_time, fixed_time + 1.0]
189+
157190

158191
expected_log_data = {
159192
"function_name": "filename_processor_handle_record",
@@ -168,9 +201,13 @@ def test_logging_successful_validation(self):
168201
}
169202

170203
log_data = json.loads(mock_logger.info.call_args[0][0])
204+
print("Actual log_data:", log_data)
205+
print("Expected:", expected_log_data)
171206
self.assertEqual(log_data, expected_log_data)
172-
173-
mock_send_log_to_firehose.assert_called_once_with(log_data)
207+
mock_client.put_record.assert_called_once_with(
208+
DeliveryStreamName="immunisation-fhir-api-internal-dev-splunk-firehose",
209+
Record={"Data": json.dumps({"event": expected_log_data}).encode("utf-8")},
210+
)
174211

175212
def test_logging_failed_validation(self):
176213
"""Tests that the correct logs are sent to cloudwatch and splunk when file validation fails"""
@@ -179,7 +216,7 @@ def test_logging_failed_validation(self):
179216

180217
with ( # noqa: E999
181218
patch("file_name_processor.uuid4", return_value=FILE_DETAILS.message_id), # noqa: E999
182-
patch("elasticache.redis_client.get", return_value=permissions_config_content), # noqa: E999
219+
patch("supplier_permissions.redis_client.get", return_value=permissions_config_content), # noqa: E999
183220
patch("logging_decorator.send_log_to_firehose") as mock_send_log_to_firehose, # noqa: E999
184221
patch("logging_decorator.logger") as mock_logger, # noqa: E999
185222
): # noqa: E999
@@ -214,7 +251,7 @@ def test_logging_throws_exception(self):
214251

215252
with (
216253
patch("file_name_processor.uuid4", return_value=FILE_DETAILS.message_id),
217-
patch("elasticache.redis_client.get", return_value=permissions_config_content),
254+
patch("supplier_permissions.redis_client.get", return_value=permissions_config_content),
218255
patch("logging_decorator.firehose_client.put_record", side_effect=firehose_exception),
219256
patch("logging_decorator.logger") as mock_logger,
220257
):

0 commit comments

Comments (0)