Skip to content

Commit c002b52

Browse files
committed
move_file
1 parent ddf86a0 commit c002b52

File tree

12 files changed

+258
-292
lines changed

12 files changed

+258
-292
lines changed

lambdas/ack_backend/src/update_ack_file.py

Lines changed: 1 addition & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,7 @@
66

77
from audit_table import change_audit_table_status_to_processed
88
from common.clients import get_s3_client, logger
9+
from common.utils import move_file
910
from constants import (
1011
ACK_HEADERS,
1112
BATCH_FILE_ARCHIVE_DIR,
@@ -123,15 +124,3 @@ def update_ack_file(
123124

124125
get_s3_client().upload_fileobj(csv_file_like_object, ack_bucket_name, temp_ack_file_key)
125126
logger.info("Ack file updated to %s: %s", ack_bucket_name, archive_ack_file_key)
126-
127-
128-
def move_file(bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
129-
"""Moves a file from one location to another within a single S3 bucket by copying and then deleting the file."""
130-
s3_client = get_s3_client()
131-
s3_client.copy_object(
132-
Bucket=bucket_name,
133-
CopySource={"Bucket": bucket_name, "Key": source_file_key},
134-
Key=destination_file_key,
135-
)
136-
s3_client.delete_object(Bucket=bucket_name, Key=source_file_key)
137-
logger.info("File moved from %s to %s", source_file_key, destination_file_key)

lambdas/ack_backend/tests/test_splunk_logging.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -62,6 +62,7 @@ def run(self, result=None):
6262
# The logging_decorator.logger is patched individually in each test to allow for assertions to be made.
6363
# Any uses of the logger in other files will confound the tests and should be patched here.
6464
patch("update_ack_file.logger"),
65+
patch("common.utils.logger"),
6566
# Time is incremented by 1.0 for each call to time.time for ease of testing.
6667
# Range is set to a large number (300) due to many calls being made to time.time for some tests.
6768
patch(

lambdas/ack_backend/tests/test_update_ack_file_flow.py

Lines changed: 0 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -69,26 +69,3 @@ def test_audit_table_updated_correctly_when_ack_process_complete(self):
6969

7070
# Assert: Only check audit table update
7171
self.mock_change_audit_status.assert_called_once_with(file_key, message_id)
72-
73-
def test_move_file(self):
74-
"""VED-167 test that the file has been moved to the appropriate location"""
75-
bucket_name = "move-bucket"
76-
file_key = "src/move_file_test.csv"
77-
dest_key = "dest/move_file_test.csv"
78-
self.s3_client.create_bucket(
79-
Bucket=bucket_name,
80-
CreateBucketConfiguration={"LocationConstraint": "eu-west-2"},
81-
)
82-
self.s3_client.put_object(Bucket=bucket_name, Key=file_key, Body="dummy content")
83-
update_ack_file.move_file(bucket_name, file_key, dest_key)
84-
# Assert the destination object exists
85-
response = self.s3_client.get_object(Bucket=bucket_name, Key=dest_key)
86-
content = response["Body"].read().decode()
87-
self.assertEqual(content, "dummy content")
88-
89-
# Assert the source object no longer exists
90-
with self.assertRaises(self.s3_client.exceptions.NoSuchKey):
91-
self.s3_client.get_object(Bucket=bucket_name, Key=file_key)
92-
93-
# Logger assertion (if logger is mocked)
94-
self.mock_logger.info.assert_called_with("File moved from %s to %s", file_key, dest_key)

lambdas/filenameprocessor/src/file_name_processor.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from common.clients import STREAM_NAME, get_s3_client, logger
1414
from common.log_decorator import logging_decorator
1515
from common.models.errors import UnhandledAuditTableError
16+
from common.utils import move_file
1617
from constants import (
1718
ERROR_TYPE_TO_STATUS_CODE_MAP,
1819
SOURCE_BUCKET_NAME,
@@ -28,7 +29,7 @@
2829
)
2930
from send_sqs_message import make_and_send_sqs_message
3031
from supplier_permissions import validate_vaccine_type_permissions
31-
from utils_for_filenameprocessor import get_creation_and_expiry_times, move_file
32+
from utils_for_filenameprocessor import get_creation_and_expiry_times
3233

3334

3435
# NOTE: logging_decorator is applied to handle_record function, rather than lambda_handler, because

lambdas/filenameprocessor/src/utils_for_filenameprocessor.py

Lines changed: 0 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,6 @@
22

33
from datetime import timedelta
44

5-
from common.clients import get_s3_client, logger
65
from constants import AUDIT_TABLE_TTL_DAYS
76

87

@@ -12,15 +11,3 @@ def get_creation_and_expiry_times(s3_response: dict) -> (str, int):
1211
expiry_datetime = creation_datetime + timedelta(days=int(AUDIT_TABLE_TTL_DAYS))
1312
expiry_timestamp = int(expiry_datetime.timestamp())
1413
return creation_datetime.strftime("%Y%m%dT%H%M%S00"), expiry_timestamp
15-
16-
17-
def move_file(bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
18-
"""Moves a file from one location to another within a single S3 bucket by copying and then deleting the file."""
19-
s3_client = get_s3_client()
20-
s3_client.copy_object(
21-
Bucket=bucket_name,
22-
CopySource={"Bucket": bucket_name, "Key": source_file_key},
23-
Key=destination_file_key,
24-
)
25-
s3_client.delete_object(Bucket=bucket_name, Key=source_file_key)
26-
logger.info("File moved from %s to %s", source_file_key, destination_file_key)

lambdas/filenameprocessor/tests/test_utils_for_filenameprocessor.py

Lines changed: 1 addition & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99

1010
from utils_for_tests.mock_environment_variables import (
1111
MOCK_ENVIRONMENT_DICT,
12-
BucketNames,
1312
)
1413
from utils_for_tests.utils_for_filenameprocessor_tests import (
1514
GenericSetUp,
@@ -20,7 +19,7 @@
2019
with patch.dict("os.environ", MOCK_ENVIRONMENT_DICT):
2120
from common.clients import REGION_NAME
2221
from constants import AUDIT_TABLE_TTL_DAYS
23-
from utils_for_filenameprocessor import get_creation_and_expiry_times, move_file
22+
from utils_for_filenameprocessor import get_creation_and_expiry_times
2423

2524
s3_client = boto3_client("s3", region_name=REGION_NAME)
2625

@@ -50,20 +49,3 @@ def test_get_creation_and_expiry_times(self):
5049

5150
self.assertEqual(created_at_formatted_string, expected_result_created_at)
5251
self.assertEqual(expires_at, expected_result_expires_at)
53-
54-
def test_move_file(self):
55-
"""Tests that move_file correctly moves a file from one location to another within a single S3 bucket"""
56-
source_file_key = "test_file_key"
57-
destination_file_key = "destination/test_file_key"
58-
source_file_content = "test_content"
59-
s3_client.put_object(Bucket=BucketNames.SOURCE, Key=source_file_key, Body=source_file_content)
60-
61-
move_file(BucketNames.SOURCE, source_file_key, destination_file_key)
62-
63-
keys_of_objects_in_bucket = [
64-
obj["Key"] for obj in s3_client.list_objects_v2(Bucket=BucketNames.SOURCE).get("Contents")
65-
]
66-
self.assertNotIn(source_file_key, keys_of_objects_in_bucket)
67-
self.assertIn(destination_file_key, keys_of_objects_in_bucket)
68-
destination_file_content = s3_client.get_object(Bucket=BucketNames.SOURCE, Key=destination_file_key)
69-
self.assertEqual(destination_file_content["Body"].read().decode("utf-8"), source_file_content)

lambdas/recordprocessor/src/batch_processor.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -9,14 +9,15 @@
99

1010
from audit_table import update_audit_table_status
1111
from common.clients import logger
12+
from common.utils import move_file
1213
from constants import (
1314
ARCHIVE_DIR_NAME,
1415
PROCESSING_DIR_NAME,
1516
SOURCE_BUCKET_NAME,
1617
FileNotProcessedReason,
1718
FileStatus,
1819
)
19-
from file_level_validation import file_is_empty, file_level_validation, move_file
20+
from file_level_validation import file_is_empty, file_level_validation
2021
from mappings import map_target_disease
2122
from process_row import process_row
2223
from send_to_kinesis import send_to_kinesis

lambdas/recordprocessor/src/file_level_validation.py

Lines changed: 2 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,8 @@
66
from csv import DictReader
77

88
from audit_table import update_audit_table_status
9-
from common.clients import get_s3_client, logger
9+
from common.clients import logger
10+
from common.utils import move_file
1011
from constants import (
1112
ARCHIVE_DIR_NAME,
1213
EXPECTED_CSV_HEADERS,
@@ -61,18 +62,6 @@ def get_permitted_operations(supplier: str, vaccine_type: str, allowed_permissio
6162
return permitted_operations_for_vaccine_type
6263

6364

64-
def move_file(bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
65-
"""Moves a file from one location to another within a single S3 bucket by copying and then deleting the file."""
66-
s3_client = get_s3_client()
67-
s3_client.copy_object(
68-
Bucket=bucket_name,
69-
CopySource={"Bucket": bucket_name, "Key": source_file_key},
70-
Key=destination_file_key,
71-
)
72-
s3_client.delete_object(Bucket=bucket_name, Key=source_file_key)
73-
logger.info("File moved from %s to %s", source_file_key, destination_file_key)
74-
75-
7665
@file_level_validation_logging_decorator
7766
def file_level_validation(incoming_message_body: dict) -> dict:
7867
"""

lambdas/recordprocessor/tests/test_utils_for_recordprocessor.py

Lines changed: 0 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,6 @@
2323
)
2424

2525
with patch("os.environ", MOCK_ENVIRONMENT_DICT):
26-
from file_level_validation import move_file
2726
from utils_for_recordprocessor import (
2827
create_diagnostics_dictionary,
2928
get_csv_content_dict_reader,
@@ -87,23 +86,6 @@ def test_create_diagnostics_dictionary(self):
8786
},
8887
)
8988

90-
def test_move_file(self):
91-
"""Tests that move_file correctly moves a file from one location to another within a single S3 bucket"""
92-
source_file_key = "test_file_key"
93-
destination_file_key = "archive/test_file_key"
94-
source_file_content = "test_content"
95-
s3_client.put_object(Bucket=BucketNames.SOURCE, Key=source_file_key, Body=source_file_content)
96-
97-
move_file(BucketNames.SOURCE, source_file_key, destination_file_key)
98-
99-
keys_of_objects_in_bucket = [
100-
obj["Key"] for obj in s3_client.list_objects_v2(Bucket=BucketNames.SOURCE).get("Contents")
101-
]
102-
self.assertNotIn(source_file_key, keys_of_objects_in_bucket)
103-
self.assertIn(destination_file_key, keys_of_objects_in_bucket)
104-
destination_file_content = s3_client.get_object(Bucket=BucketNames.SOURCE, Key=destination_file_key)
105-
self.assertEqual(destination_file_content["Body"].read().decode("utf-8"), source_file_content)
106-
10789

10890
if __name__ == "__main__":
10991
unittest.main()

lambdas/shared/src/common/utils.py

Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
"""Non-imms Utility Functions"""
2+
3+
from common.clients import get_s3_client, logger
4+
5+
6+
def move_file(bucket_name: str, source_file_key: str, destination_file_key: str) -> None:
    """Relocate an object to a new key within a single S3 bucket.

    S3 has no native rename operation, so the object is first copied to the
    destination key and the source key is then deleted.

    :param bucket_name: Name of the bucket containing the object.
    :param source_file_key: Key of the object to move.
    :param destination_file_key: Key the object should end up under.
    """
    client = get_s3_client()
    copy_source = {"Bucket": bucket_name, "Key": source_file_key}
    client.copy_object(Bucket=bucket_name, CopySource=copy_source, Key=destination_file_key)
    client.delete_object(Bucket=bucket_name, Key=source_file_key)
    logger.info("File moved from %s to %s", source_file_key, destination_file_key)

0 commit comments

Comments
 (0)