Skip to content

Commit 6f90c2a

Browse files
committed
debug
1 parent 44629d8 commit 6f90c2a

File tree

7 files changed

+71
-46
lines changed

7 files changed

+71
-46
lines changed

ack_backend/src/ack_processor.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44
from logging_decorators import ack_lambda_handler_logging_decorator
55
from update_ack_file import update_ack_file
66
from convert_message_to_ack_row import convert_message_to_ack_row
7+
from clients import logger
78

89

910
@ack_lambda_handler_logging_decorator
@@ -13,7 +14,7 @@ def lambda_handler(event, context):
1314
For each record: each message in the array of messages is converted to an ack row,
1415
then all of the ack rows for that array of messages are uploaded to the ack file in one go.
1516
"""
16-
17+
logger.info("SAW DEBUG ack_processor.lambda_handler")
1718
if not event.get("Records"):
1819
raise ValueError("Error in ack_processor_lambda_handler: No records found in the event")
1920

@@ -24,7 +25,7 @@ def lambda_handler(event, context):
2425
ack_data_rows = []
2526

2627
for i, record in enumerate(event["Records"]):
27-
28+
logger.info(f"SAW DEBUG record {i}")
2829
try:
2930
incoming_message_body = json.loads(record["body"])
3031
except Exception as body_json_error:

ack_backend/src/convert_message_to_ack_row.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,7 @@
33
from typing import Union
44
from logging_decorators import convert_message_to_ack_row_logging_decorator
55
from update_ack_file import create_ack_data
6-
6+
from clients import logger
77

88
def get_error_message_for_ack_file(message_diagnostics) -> Union[None, str]:
99
"""Determines and returns the error message to be displayed in the ack file"""
@@ -26,6 +26,8 @@ def convert_message_to_ack_row(message, created_at_formatted_string):
2626
A value error is raised if the file_key or created_at_formatted_string for the message do not match the
2727
expected values.
2828
"""
29+
logger.info("SAW DEBUG convert_message_to_ack_row")
30+
logger.info(f"SAW DEBUG convert_message_to_ack_row: {message}")
2931
diagnostics = message.get("diagnostics")
3032
return create_ack_data(
3133
created_at_formatted_string=created_at_formatted_string,

ack_backend/src/update_ack_file.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,8 @@ def create_ack_data(
2020
) -> dict:
2121
"""Returns a dictionary containing the ack headers as keys, along with the relevant values."""
2222
# Pack multi-line diagnostics down to single line (because Imms API diagnostics may be multi-line)
23+
logger.info("SAW DEBUG create_ack_data")
24+
logger.info(f"SAW DEBUG create_ack_data: {diagnostics}")
2325
diagnostics = (
2426
" ".join(diagnostics.replace("\r", " ").replace("\n", " ").replace("\t", " ").replace("\xa0", " ").split())
2527
if diagnostics is not None

e2e_batch/clients.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,13 @@
1313
s3_client = boto3_client("s3", region_name=REGION)
1414

1515
dynamodb = boto3_resource("dynamodb", region_name=REGION)
16+
sqs_client = boto3_client('sqs', region_name=REGION)
1617
table_name = f"imms-{environment}-imms-events"
1718
table = dynamodb.Table(table_name)
1819
audit_table_name = f"immunisation-batch-{environment}-audit-table"
1920
audit_table = dynamodb.Table(audit_table_name)
21+
batch_fifo_queue_name = f"imms-{environment}-batch-file-created-queue.fifo"
22+
batch_fifo_queue_url = sqs_client.get_queue_url(QueueName=batch_fifo_queue_name)['QueueUrl']
2023
# Logger
2124
logging.basicConfig(level="INFO")
2225
logger = logging.getLogger()

e2e_batch/constants.py

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -66,7 +66,6 @@ def create_row(unique_id, dose_amount, action_flag: ActionFlag, header, inject_c
6666
"""Helper function to create a single row with the specified UNIQUE_ID and ACTION_FLAG."""
6767

6868
name = "James" if not inject_char else b'Jam\xe9s'
69-
7069
return {
7170
header: "9732928395",
7271
"PERSON_FORENAME": "PHYLIS",
@@ -79,7 +78,7 @@ def create_row(unique_id, dose_amount, action_flag: ActionFlag, header, inject_c
7978
"SITE_CODE_TYPE_URI": "https://fhir.nhs.uk/Id/ods-organization-code",
8079
"UNIQUE_ID": unique_id,
8180
"UNIQUE_ID_URI": "https://www.ravs.england.nhs.uk/",
82-
"ACTION_FLAG": action_flag if action_flag != "NEW" else "CREATE", # TODO check if this is needed
81+
"ACTION_FLAG": action_flag,
8382
"PERFORMING_PROFESSIONAL_FORENAME": "PHYLIS",
8483
"PERFORMING_PROFESSIONAL_SURNAME": name,
8584
"RECORDED_DATE": datetime.now(timezone.utc).strftime("%Y%m%d"),

e2e_batch/test_e2e_batch.py

Lines changed: 45 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -34,17 +34,7 @@
3434
class TestE2EBatch(unittest.TestCase):
3535

3636
def setUp(self):
37-
self.seed_datas = [
38-
TestData("V0V8L", ActionSet.CREATE),
39-
# TestData("8HK48", ActionSet.UPDATE),
40-
# TestData("8HA94", ActionSet.DELETE),
41-
# TestData("Reinstate", "X26", [NEW, DELETE, UPDATE]),
42-
# TestData("Update no Create", "YGM41", [UPDATE], expected=BusRowResult.FATAL_ERROR,
43-
# operation_outcome=OpMsgs.IMM_NOT_EXIST),
44-
# TestData("Delete no Create", "YGJ", [DELETE], expected=BusRowResult.FATAL_ERROR,
45-
# operation_outcome=OpMsgs.IMM_NOT_EXIST),
46-
# TestData("YGA", ActionSet.CREATE, inject_char=True, name="Create with 1252 char"),
47-
]
37+
self.seed_datas = create_seed_data()
4838

4939
def tearDown(self):
5040
# loop through all files and delete them from s3
@@ -99,31 +89,49 @@ def validate_responses(tests: list[TestData]):
9989
start_time = time.time()
10090
count = 0
10191
expected_count = len(tests) * 2
102-
for test in tests:
103-
logger.info(f"Validation for Test: {test.name} ")
104-
# Validate the ACK file
105-
if test.ack_keys[DestinationType.INF]:
106-
count += 1
107-
inf_ack_content = get_file_content_from_s3(ACK_BUCKET, test.ack_keys[DestinationType.INF])
108-
else:
109-
logger.error(f"INF ACK file not found for test: {test.name}")
110-
111-
check_ack_file_content(test.name, inf_ack_content, "Success", None, test.actions)
112-
if test.ack_keys[DestinationType.BUS]:
113-
count += 1
114-
validate_row_count(f"{test.name} - inf", test.file_name, test.ack_keys[DestinationType.BUS])
115-
# check row after header
116-
bus_ack_content = get_file_content_from_s3(ACK_BUCKET, test.ack_keys[DestinationType.BUS])
117-
# loop through each line in the bus ack content
118-
119-
# sometimes OK and sometimes CREATE
120-
check_inf_file_content(f"{test.name} - bus", bus_ack_content, "OK", test.operation_outcome,
121-
test.getOperations())
92+
try:
93+
for test in tests:
94+
logger.info(f"Validation for Test: {test.name} ")
95+
# Validate the ACK file
96+
if test.ack_keys[DestinationType.INF]:
97+
count += 1
98+
inf_ack_content = get_file_content_from_s3(ACK_BUCKET, test.ack_keys[DestinationType.INF])
99+
check_ack_file_content(test.name, inf_ack_content, "Success", None, test.action_sequence.actions)
100+
else:
101+
logger.error(f"INF ACK file not found for test: {test.name}")
102+
103+
if test.ack_keys[DestinationType.BUS]:
104+
count += 1
105+
validate_row_count(f"{test.name} - inf", test.file_name, test.ack_keys[DestinationType.BUS])
106+
# check row after header
107+
bus_ack_content = get_file_content_from_s3(ACK_BUCKET, test.ack_keys[DestinationType.BUS])
108+
# loop through each line in the bus ack content
109+
110+
# sometimes OK and sometimes CREATE
111+
check_inf_file_content(f"{test.name} - bus", bus_ack_content, "OK", test.operation_outcome,
112+
test.getOperations())
113+
else:
114+
logger.error(f"BUS ACK file not found for test: {test.name}")
115+
except Exception as e:
116+
logger.error(f"Error during validation: {e}")
117+
finally:
118+
if count == expected_count:
119+
logger.info("All responses subject to validation.")
122120
else:
123-
logger.error(f"BUS ACK file not found for test: {test.name}")
121+
logger.error(f"{count} of {expected_count} responses subject to validation.")
122+
logger.info(f"Time: {time.time() - start_time:.1f} seconds")
123+
assert count == expected_count, f"Only {count} of {expected_count} responses subject to validation."
124+
124125

125-
if count == expected_count:
126-
logger.info("All responses subject to validation.")
127-
else:
128-
logger.error(f"{count} of {expected_count} responses subject to validation.")
129-
logger.info(f"Time: {time.time() - start_time:.1f} seconds")
126+
def create_seed_data() -> list[TestData]:
127+
return [
128+
# TestData("V0V8L", ActionSet.CREATE),
129+
# TestData("8HK48", ActionSet.UPDATE),
130+
TestData("8HA94", ActionSet.DELETE),
131+
# TestData("Reinstate", "X26", [NEW, DELETE, UPDATE]),
132+
# TestData("Update no Create", "YGM41", [UPDATE], expected=BusRowResult.FATAL_ERROR,
133+
# operation_outcome=OpMsgs.IMM_NOT_EXIST),
134+
# TestData("Delete no Create", "YGJ", [DELETE], expected=BusRowResult.FATAL_ERROR,
135+
# operation_outcome=OpMsgs.IMM_NOT_EXIST),
136+
# TestData("YGA", ActionSet.CREATE, inject_char=True, name="Create with 1252 char"),
137+
]

e2e_batch/utils.py

Lines changed: 14 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from boto3.dynamodb.conditions import Key
1111
from io import StringIO
1212
from datetime import datetime, timezone
13-
from clients import logger, s3_client, table, audit_table
13+
from clients import logger, s3_client, table, audit_table, batch_fifo_queue_url, sqs_client
1414
from errors import AckFileNotFoundError, DynamoDBMismatchError
1515
from constants import (
1616
ACK_BUCKET,
@@ -88,7 +88,7 @@ def __init__(self,
8888

8989
def getOperations(self) -> list[Operation]:
9090
operations = []
91-
for action in self.actions:
91+
for action in self.action_sequence.actions:
9292
operations.append(get_operation(action))
9393
return operations
9494

@@ -580,7 +580,7 @@ def generate_csv_files(seed_data_list: list[TestData]) -> list[TestData]:
580580
"""Generate CSV files based on a list of TestData instances."""
581581
for seed_data in seed_data_list:
582582
file_name = (generate_csv_file(seed_data, actions=seed_data.action_sequence.actions))
583-
logger.info(f"Test file: {file_name}")
583+
logger.info(f"Create Test file: {file_name}")
584584
seed_data.file_name = file_name
585585
return seed_data_list
586586

@@ -590,7 +590,9 @@ def generate_csv_file(seed: TestData, actions: list[ActionFlag]) -> str:
590590
data = []
591591
for action in actions:
592592
unique_id = str(uuid.uuid4())
593-
data.append(create_row(unique_id, seed.dose_amount, action, seed.header, seed.inject_char))
593+
row = create_row(unique_id, seed.dose_amount, action, seed.header, seed.inject_char)
594+
logger.info(f"> Create row ID: {unique_id}, ACTION_FLAG: {action}")
595+
data.append(row)
594596
df = pd.DataFrame(data)
595597
file_name = get_file_name(seed.vax, seed.ods, seed.version)
596598
df.to_csv(file_name, index=False, sep="|", quoting=csv.QUOTE_MINIMAL)
@@ -629,6 +631,14 @@ def cleanup(data_list: list[TestData]):
629631
if ack_key:
630632
if not delete_file_from_s3(ACK_BUCKET, ack_key):
631633
logger.warning(f"s3 delete fail {ACK_BUCKET}: {ack_key}")
634+
# purge batch_fifo_queue_url
635+
try:
636+
sqs_client.purge_queue(QueueUrl=batch_fifo_queue_url)
637+
print("SQS purge successful.")
638+
except sqs_client.exceptions.PurgeQueueInProgress:
639+
print("SQS purge already in progress. Try again later.")
640+
except Exception as e:
641+
print(f"SQS purge errored: {e}")
632642

633643

634644
def check_inf_file_content(desc, content, response_code, operation_outcome,

0 commit comments

Comments
 (0)