
Commit 95e1f8d

PoC for 902: to be re-engineered II
1 parent ec703cb commit 95e1f8d

File tree

1 file changed (+9, -3 lines)

lambdas/filenameprocessor/src/file_name_processor.py

Lines changed: 9 additions & 3 deletions
@@ -112,9 +112,6 @@ def handle_record(record) -> dict:
     s3_response = get_s3_client().get_object(Bucket=bucket_name, Key=file_key)
     created_at_formatted_string, expiry_timestamp = get_creation_and_expiry_times(s3_response)
 
-    vaccine_type, supplier = validate_file_key(file_key)
-    permissions = validate_vaccine_type_permissions(vaccine_type=vaccine_type, supplier=supplier)
-
     # here: if it's an EA file, move it, and upsert it to PROCESSING; use the bucket name as the queue name
     if TEST_EA_FILENAME in file_key:
         dest_bucket_name = TEST_EA_BUCKET
@@ -133,6 +130,12 @@ def handle_record(record) -> dict:
 
         # TODO: check the file is in the dest bucket, upsert again accordingly.
         # NB: not clear yet whether we need to do this in an entirely new lambda.
+        # Current thinking is that we don't, because s3_client.copy_object is synchronous,
+        # therefore the only time we should fail is if the dest bucket is unavailable or we don't
+        # have permissions.
+        # NB - in this situation, surely we should not delete the original file, but move it somewhere?
+        # hence, break up move_file_to_bucket()
+
         if is_file_in_bucket(dest_bucket_name, file_key):
             status_code = 200
             message = (f"Successfully sent to {dest_bucket_name} for further processing",)
@@ -158,6 +161,9 @@ def handle_record(record) -> dict:
             "message_id": message_id,
         }
     else:
+        vaccine_type, supplier = validate_file_key(file_key)
+        permissions = validate_vaccine_type_permissions(vaccine_type=vaccine_type, supplier=supplier)
+
         queue_name = f"{supplier}_{vaccine_type}"
         upsert_audit_table(
             message_id,
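
The new comments propose breaking up move_file_to_bucket() so that the original file is only deleted once the copy has verifiably landed, and is set aside rather than deleted when verification fails. Below is a minimal sketch of that shape, not the repository's actual implementation: it assumes boto3, and the helper name copy_file_to_bucket and the quarantine/ prefix are illustrative assumptions.

# Sketch only: an assumed split of move_file_to_bucket() into copy / verify / delete steps.
import boto3
from botocore.exceptions import ClientError


def copy_file_to_bucket(s3_client, source_bucket: str, dest_bucket: str, file_key: str) -> None:
    # copy_object is synchronous, so a failure surfaces here
    # (e.g. destination bucket unavailable or missing permissions).
    s3_client.copy_object(
        Bucket=dest_bucket,
        Key=file_key,
        CopySource={"Bucket": source_bucket, "Key": file_key},
    )


def move_file_to_bucket(source_bucket: str, dest_bucket: str, file_key: str) -> bool:
    # The repo's get_s3_client() helper could be used here instead.
    s3_client = boto3.client("s3")
    try:
        copy_file_to_bucket(s3_client, source_bucket, dest_bucket, file_key)
    except ClientError:
        # Copy failed outright: leave the original untouched.
        return False

    # Verify the object actually landed before touching the source.
    try:
        s3_client.head_object(Bucket=dest_bucket, Key=file_key)
    except ClientError:
        # Copy did not land: set the original aside (hypothetical quarantine/ prefix)
        # instead of deleting it.
        s3_client.copy_object(
            Bucket=source_bucket,
            Key=f"quarantine/{file_key}",
            CopySource={"Bucket": source_bucket, "Key": file_key},
        )
        return False

    s3_client.delete_object(Bucket=source_bucket, Key=file_key)
    return True

With a return value like this, the caller in handle_record could upsert the audit record to a failure status when the move does not complete, rather than assuming the destination write succeeded.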

0 commit comments
