
Commit 4097465

Commit message: unit tests
1 parent 1bdd7a3 · commit 4097465

File tree: 2 files changed (+120, -2 lines)

lambdas/filenameprocessor/tests/test_lambda_handler.py

Lines changed: 77 additions & 2 deletions
@@ -243,7 +243,7 @@ def test_lambda_handler_non_root_file(self):
     def test_lambda_handler_extended_attributes_success(self):
         """
         Tests that for an extended attributes file (prefix starts with 'Vaccination_Extended_Attributes'):
-        * The file is added to the audit table with a status of 'Processing'
+        * The file is added to the audit table with a status of 'Processed'
         * The queue_name stored is the extended attribute identifier
         * The file is moved to the destination bucket under archive/
         * No SQS message is sent
@@ -278,16 +278,27 @@ def test_lambda_handler_extended_attributes_success(self):
                     ),
                 ),
             ),
+            patch(
+                "file_name_processor.delete_file",
+                side_effect=lambda src_bucket, key: (
+                    s3_client.delete_object(
+                        Bucket=BucketNames.SOURCE,
+                        Key=key,
+                    ),
+                ),
+            ),
         ):
             lambda_handler(self.make_event([self.make_record(test_cases[0].file_key)]), None)
 
-        # Assert audit table entry captured with Processing and queue_name set to the identifier
+        # Assert audit table entry captured with Processed and queue_name set to the identifier
         table_items = self.get_audit_table_items()
         self.assertEqual(len(table_items), 1)
         item = table_items[0]
+        print(json.dumps(item))
         self.assertEqual(item[AuditTableKeys.MESSAGE_ID]["S"], test_cases[0].message_id)
         self.assertEqual(item[AuditTableKeys.FILENAME]["S"], test_cases[0].file_key)
         self.assertEqual(item[AuditTableKeys.QUEUE_NAME]["S"], test_cases[0].ods_code + "_COVID")
+        self.assertEqual(item[AuditTableKeys.STATUS]["S"], "Processed")
         self.assertEqual(item[AuditTableKeys.TIMESTAMP]["S"], test_cases[0].created_at_formatted_string)
         self.assertEqual(item[AuditTableKeys.EXPIRES_AT]["N"], str(test_cases[0].expires_at))
         # File should be moved to destination under archive/
@@ -300,6 +311,70 @@ def test_lambda_handler_extended_attributes_success(self):
         self.assert_no_sqs_message()
         self.assert_no_ack_file(test_cases[0])
 
+    def test_lambda_handler_extended_attributes_failure(self):
+        """
+        Tests that for an extended attributes file (prefix starts with 'Vaccination_Extended_Attributes'):
+        Where the file has not been copied to the destination bucket
+        * The file is added to the audit table with a status of 'Failed'
+        * The queue_name stored is the extended attribute identifier
+        * The file is moved to the archive/ folder in the source bucket
+        * No SQS message is sent
+        * No ack file is created
+        """
+
+        # Build an extended attributes file.
+        # FileDetails supports this when vaccine_type starts with 'Vaccination_Extended_Attributes'.
+        test_cases = [MockFileDetails.extended_attributes_file]
+
+        # Put file in source bucket
+        s3_client.put_object(
+            Bucket=BucketNames.SOURCE,
+            Key=test_cases[0].file_key,
+            Body=MOCK_EXTENDED_ATTRIBUTES_FILE_CONTENT,
+        )
+
+        # Patch uuid4 (message id), the identifier extraction, and don't move the file
+        with (
+            patch("file_name_processor.uuid4", return_value=test_cases[0].message_id),
+            patch(
+                "file_name_processor.validate_extended_attributes_file_key",
+                return_value=test_cases[0].ods_code + "_COVID",
+            ),
+            patch(
+                "file_name_processor.copy_file_outside_bucket",
+                side_effect=lambda src_bucket, key, dst_bucket, dst_key: (  # effectively do nothing
+                    None,
+                ),
+            ),
+        ):
+            lambda_handler(self.make_event([self.make_record(test_cases[0].file_key)]), None)
+
+        # Assert audit table entry captured with Failed and queue_name set to the identifier.
+        # Assert that the ClientError message is a 404 Not Found.
+        table_items = self.get_audit_table_items()
+        self.assertEqual(len(table_items), 1)
+        item = table_items[0]
+        print(json.dumps(item))
+        self.assertEqual(item[AuditTableKeys.MESSAGE_ID]["S"], test_cases[0].message_id)
+        self.assertEqual(item[AuditTableKeys.FILENAME]["S"], test_cases[0].file_key)
+        self.assertEqual(item[AuditTableKeys.QUEUE_NAME]["S"], test_cases[0].ods_code + "_COVID")
+        self.assertEqual(item[AuditTableKeys.TIMESTAMP]["S"], test_cases[0].created_at_formatted_string)
+        self.assertEqual(item[AuditTableKeys.STATUS]["S"], "Failed")
+        self.assertEqual(
+            item[AuditTableKeys.ERROR_DETAILS]["S"],
+            "An error occurred (404) when calling the HeadObject operation: Not Found",
+        )
+        self.assertEqual(item[AuditTableKeys.EXPIRES_AT]["N"], str(test_cases[0].expires_at))
+        # File should be moved to source under archive/
+        dest_key = f"archive/{test_cases[0].file_key}"
+        print(f" destination file is at {s3_client.list_objects(Bucket=BucketNames.SOURCE)}")
+        retrieved = s3_client.get_object(Bucket=BucketNames.SOURCE, Key=dest_key)
+        self.assertIsNotNone(retrieved)
+
+        # No SQS and no ack file
+        self.assert_no_sqs_message()
+        self.assert_no_ack_file(test_cases[0])
+
     @patch("elasticache.get_redis_client")
     def test_lambda_invalid_file_key_no_other_files_in_queue(self, mock_get_redis_client):
         """

lambdas/shared/tests/test_common/test_s3_utils.py

Lines changed: 43 additions & 0 deletions
@@ -4,6 +4,7 @@
 from unittest.mock import patch
 
 import boto3
+from botocore.exceptions import ClientError
 from moto import mock_aws
 
 from common import aws_s3_utils
@@ -113,3 +114,45 @@ def test_move_file_outside_bucket_copies_then_deletes(self):
         # Assert source object was deleted
         with self.assertRaises(self.s3.exceptions.NoSuchKey):
             self.s3.get_object(Bucket=self.source_bucket, Key=source_key)
+
+    def test_is_file_in_bucket(self):
+        """File should be present in source bucket"""
+        file_key = "src/move_file_test.csv"
+
+        # Put an object in the source bucket
+        body_content = b"dummy file content"
+        self.s3.put_object(Bucket=self.source_bucket, Key=file_key, Body=body_content)
+
+        # Should raise no exception
+        aws_s3_utils.is_file_in_bucket(
+            bucket_name=self.source_bucket,
+            file_key=file_key,
+        )
+
+    def test_is_file_not_in_bucket(self):
+        """File should not be present in source bucket"""
+        file_key = "src/move_file_test.csv"
+
+        # Don't put the object in the source bucket
+
+        # Should raise an exception
+        with self.assertRaises(ClientError):
+            aws_s3_utils.is_file_in_bucket(
+                bucket_name=self.source_bucket,
+                file_key=file_key,
+            )
+
+    def test_is_file_in_wrong_bucket(self):
+        """File should not be present in destination bucket"""
+        file_key = "src/move_file_test.csv"
+
+        # Put an object in the source bucket
+        body_content = b"dummy file content"
+        self.s3.put_object(Bucket=self.source_bucket, Key=file_key, Body=body_content)
+
+        # Should raise an exception
+        with self.assertRaises(ClientError):
+            aws_s3_utils.is_file_in_bucket(
+                bucket_name=self.destination_bucket,
+                file_key=file_key,
+            )
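
These tests exercise aws_s3_utils.is_file_in_bucket, but the commit does not include its implementation. Given that the tests expect a botocore ClientError to propagate when the object is absent, and the lambda test above asserts the 404 HeadObject message, it presumably wraps s3_client.head_object. The sketch below is an assumption along those lines, not the shared library's actual code; the module-level client is also only illustrative.

# A sketch of what aws_s3_utils.is_file_in_bucket presumably does -- an assumption,
# since the implementation is not shown in this commit.
import boto3

s3_client = boto3.client("s3")


def is_file_in_bucket(bucket_name: str, file_key: str) -> None:
    """Succeed silently if the object exists; otherwise let the botocore ClientError
    (e.g. 404 on the HeadObject operation) propagate to the caller."""
    s3_client.head_object(Bucket=bucket_name, Key=file_key)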
