Skip to content

Commit 5da7c2a

Browse files
committed
Merge branch 'VED-956-fix-missed-test-changes' into VED-956-Audit-Table-Update
2 parents 37ed457 + 0b1489d commit 5da7c2a

File tree

1 file changed

+52
-119
lines changed

1 file changed

+52
-119
lines changed

lambdas/ack_backend/tests/test_ack_processor.py

Lines changed: 52 additions & 119 deletions
Original file line numberDiff line numberDiff line change
@@ -117,49 +117,47 @@ def assert_audit_entry_counts_equal(self, message_id: str, expected_counts: dict
117117
TableName=AUDIT_TABLE_NAME, Key={"message_id": {"S": message_id}}
118118
).get("Item")
119119

120-
actual_counts = {}
121-
actual_counts["record_count"] = audit_entry.get("record_count", {}).get("N")
122-
actual_counts["records_succeeded"] = audit_entry.get("records_succeeded", {}).get("N")
123-
actual_counts["records_failed"] = audit_entry.get("records_failed", {}).get("N")
120+
actual_counts = {
121+
"record_count": audit_entry.get("record_count", {}).get("N"),
122+
"records_succeeded": audit_entry.get("records_succeeded", {}).get("N"),
123+
"records_failed": audit_entry.get("records_failed", {}).get("N"),
124+
}
124125

125-
self.assertEqual(actual_counts, expected_counts)
126+
self.assertDictEqual(actual_counts, expected_counts)
126127

127128
def test_lambda_handler_main_multiple_records(self):
128129
"""Test lambda handler with multiple records."""
129130
# Set up an audit entry which does not yet have record_count recorded
130131
add_audit_entry_to_table(self.dynamodb_client, "row")
131-
# First array of messages: all successful. Rows 1 to 3
132-
array_of_success_messages = [
132+
# First array of messages. Rows 1 to 3
133+
array_of_messages_one = [
133134
{
134-
**BASE_SUCCESS_MESSAGE,
135+
**BASE_FAILURE_MESSAGE,
135136
"row_id": f"row^{i}",
136-
"imms_id": f"imms_{i}",
137137
"local_id": f"local^{i}",
138138
}
139139
for i in range(1, 4)
140140
]
141-
# Second array of messages: all with diagnostics (failure messages). Rows 4 to 7
142-
array_of_failure_messages = [
141+
# Second batch array of messages. Rows 4 to 7
142+
array_of_messages_two = [
143143
{**BASE_FAILURE_MESSAGE, "row_id": f"row^{i}", "local_id": f"local^{i}"} for i in range(4, 8)
144144
]
145-
# Third array of messages: mixture of success and failure messages. Rows 8 to 11
146-
array_of_mixed_success_and_failure_messages = [
145+
# Third array of messages: mixture of diagnostic info.
146+
array_of_messages_three = [
147147
{
148148
**BASE_FAILURE_MESSAGE,
149149
"row_id": "row^8",
150150
"local_id": "local^8",
151151
"diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR,
152152
},
153153
{
154-
**BASE_SUCCESS_MESSAGE,
154+
**BASE_FAILURE_MESSAGE,
155155
"row_id": "row^9",
156-
"imms_id": "imms_9",
157156
"local_id": "local^9",
158157
},
159158
{
160-
**BASE_SUCCESS_MESSAGE,
159+
**BASE_FAILURE_MESSAGE,
161160
"row_id": "row^10",
162-
"imms_id": "imms_10",
163161
"local_id": "local^10",
164162
},
165163
{
@@ -172,15 +170,15 @@ def test_lambda_handler_main_multiple_records(self):
172170

173171
event = {
174172
"Records": [
175-
{"body": json.dumps(array_of_success_messages)},
176-
{"body": json.dumps(array_of_failure_messages)},
177-
{"body": json.dumps(array_of_mixed_success_and_failure_messages)},
173+
{"body": json.dumps(array_of_messages_one)},
174+
{"body": json.dumps(array_of_messages_two)},
175+
{"body": json.dumps(array_of_messages_three)},
178176
]
179177
}
180178
expected_entry_counts = {
181179
"record_count": None,
182180
"records_succeeded": None,
183-
"records_failed": "6",
181+
"records_failed": "11",
184182
}
185183

186184
response = lambda_handler(event=event, context={})
@@ -189,67 +187,58 @@ def test_lambda_handler_main_multiple_records(self):
189187
validate_ack_file_content(
190188
self.s3_client,
191189
[
192-
*array_of_success_messages,
193-
*array_of_failure_messages,
194-
*array_of_mixed_success_and_failure_messages,
190+
*array_of_messages_one,
191+
*array_of_messages_two,
192+
*array_of_messages_three,
195193
],
196194
existing_file_content=ValidValues.ack_headers,
197195
)
198196
self.assert_audit_entry_counts_equal("row", expected_entry_counts)
199197

200198
def test_lambda_handler_main(self):
201-
"""Test lambda handler with consitent ack_file_name and message_template."""
199+
"""Test lambda handler with consistent ack_file_name and message_template."""
202200
# Set up an audit entry which does not yet have record_count recorded
203201
add_audit_entry_to_table(self.dynamodb_client, "row")
204202
test_cases = [
205-
{
206-
"description": "Multiple messages: all successful",
207-
"messages": [{"row_id": f"row^{i + 1}"} for i in range(10)],
208-
},
209203
{
210204
"description": "Multiple messages: all with diagnostics (failure messages)",
211205
"messages": [
212206
{"row_id": "row^1", "diagnostics": DiagnosticsDictionaries.UNIQUE_ID_MISSING},
213207
{"row_id": "row^2", "diagnostics": DiagnosticsDictionaries.NO_PERMISSIONS},
214208
{"row_id": "row^3", "diagnostics": DiagnosticsDictionaries.RESOURCE_NOT_FOUND_ERROR},
215209
],
210+
"expected_failures_cum_tot": "3",
216211
},
217212
{
218-
"description": "Multiple messages: mixture of success and failure messages",
213+
"description": "Multiple messages: mixture of diagnostic outputs",
219214
"messages": [
220-
{"row_id": "row^1", "imms_id": "TEST_IMMS_ID"},
221-
{"row_id": "row^2", "diagnostics": DiagnosticsDictionaries.UNIQUE_ID_MISSING},
215+
{"row_id": "row^1", "diagnostics": DiagnosticsDictionaries.UNIQUE_ID_MISSING},
216+
{"row_id": "row^2", "diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR},
222217
{"row_id": "row^3", "diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR},
223-
{"row_id": "row^4"},
224-
{"row_id": "row^5", "diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR},
225-
{"row_id": "row^6", "diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR},
226-
{"row_id": "row^7"},
227-
{"row_id": "row^8", "diagnostics": DiagnosticsDictionaries.IDENTIFIER_DUPLICATION_ERROR},
218+
{"row_id": "row^4", "diagnostics": DiagnosticsDictionaries.CUSTOM_VALIDATION_ERROR},
219+
{"row_id": "row^5", "diagnostics": DiagnosticsDictionaries.IDENTIFIER_DUPLICATION_ERROR},
228220
],
229-
},
230-
{
231-
"description": "Single row: success",
232-
"messages": [{"row_id": "row^1"}],
221+
"expected_failures_cum_tot": "8",
233222
},
234223
{
235224
"description": "Single row: malformed diagnostics info from forwarder",
236225
"messages": [{"row_id": "row^1", "diagnostics": "SHOULD BE A DICTIONARY, NOT A STRING"}],
226+
"expected_failures_cum_tot": "9",
237227
},
238228
]
239-
expected_records_failed = [None, "3", "8", "8", "9"]
240-
expected_entry_counts = {
241-
"record_count": None,
242-
"records_succeeded": None,
243-
"records_failed": None,
244-
}
245229

246-
for test_case, expected_failures in zip(test_cases, expected_records_failed):
247-
expected_entry_counts["records_failed"] = expected_failures
248-
# Test scenario where there is no existing ack file
230+
for test_case in test_cases:
249231
with self.subTest(msg=f"No existing ack file: {test_case['description']}"):
250232
response = lambda_handler(event=self.generate_event(test_case["messages"]), context={})
251233
self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
252-
self.assert_audit_entry_counts_equal("row", expected_entry_counts)
234+
self.assert_audit_entry_counts_equal(
235+
"row",
236+
{
237+
"record_count": None,
238+
"records_succeeded": None,
239+
"records_failed": test_case["expected_failures_cum_tot"],
240+
},
241+
)
253242
validate_ack_file_content(self.s3_client, test_case["messages"])
254243

255244
self.s3_client.delete_object(
@@ -269,28 +258,27 @@ def test_lambda_handler_updates_ack_file_but_does_not_mark_complete_when_records
269258

270259
# Original source file had 100 records
271260
add_audit_entry_to_table(self.dynamodb_client, mock_batch_message_id, record_count=100)
272-
array_of_success_messages = [
261+
array_of_failure_messages = [
273262
{
274-
**BASE_SUCCESS_MESSAGE,
263+
**BASE_FAILURE_MESSAGE,
275264
"row_id": f"{mock_batch_message_id}^{i}",
276-
"imms_id": f"imms_{i}",
277265
"local_id": f"local^{i}",
278266
}
279267
for i in range(1, 4)
280268
]
281-
test_event = {"Records": [{"body": json.dumps(array_of_success_messages)}]}
269+
test_event = {"Records": [{"body": json.dumps(array_of_failure_messages)}]}
282270
expected_entry_counts = {
283271
"record_count": "100",
284272
"records_succeeded": None,
285-
"records_failed": None,
286-
}
273+
"records_failed": "3",
274+
} # Records succeeded not updated until all records are processed
287275

288276
response = lambda_handler(event=test_event, context={})
289277

290278
self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
291279
validate_ack_file_content(
292280
self.s3_client,
293-
[*array_of_success_messages],
281+
[*array_of_failure_messages],
294282
existing_file_content=ValidValues.ack_headers,
295283
)
296284
self.assert_ack_and_source_file_locations_correct(
@@ -324,31 +312,30 @@ def test_lambda_handler_updates_ack_file_and_marks_complete_when_all_records_pro
324312
Body=StringIO(existing_ack_content).getvalue(),
325313
)
326314

327-
array_of_success_messages = [
315+
array_of_failure_messages = [
328316
{
329-
**BASE_SUCCESS_MESSAGE,
317+
**BASE_FAILURE_MESSAGE,
330318
"row_id": f"{mock_batch_message_id}^{i}",
331-
"imms_id": f"imms_{i}",
332319
"local_id": f"local^{i}",
333320
}
334321
for i in range(50, 101)
335322
]
336323

337324
# Include the EoF message in the event
338-
all_messages_plus_eof = deepcopy(array_of_success_messages)
325+
all_messages_plus_eof = deepcopy(array_of_failure_messages)
339326
all_messages_plus_eof.append(MOCK_MESSAGE_DETAILS.eof_message)
340327
test_event = {"Records": [{"body": json.dumps(all_messages_plus_eof)}]}
341328
expected_entry_counts = {
342329
"record_count": "100",
343-
"records_succeeded": "100",
344-
"records_failed": None,
330+
"records_succeeded": "49",
331+
"records_failed": "51",
345332
}
346333

347334
response = lambda_handler(event=test_event, context={})
348335

349336
self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
350337
validate_ack_file_content(
351-
self.s3_client, [*array_of_success_messages], existing_file_content=existing_ack_content, is_complete=True
338+
self.s3_client, [*array_of_failure_messages], existing_file_content=existing_ack_content, is_complete=True
352339
)
353340
self.assert_ack_and_source_file_locations_correct(
354341
MOCK_MESSAGE_DETAILS.file_key,
@@ -359,60 +346,6 @@ def test_lambda_handler_updates_ack_file_and_marks_complete_when_all_records_pro
359346
self.assert_audit_entry_status_equals(mock_batch_message_id, "Processed")
360347
self.assert_audit_entry_counts_equal(mock_batch_message_id, expected_entry_counts)
361348

362-
def test_lambda_handler_sets_records_succeeded(self):
363-
"""
364-
Test that the records_succeeded count is set when all records have been processed.
365-
"""
366-
mock_batch_message_id = "75db20e6-c0b5-4012-a8bc-f861a1dd4b22"
367-
368-
# Original source file had 100 records
369-
add_audit_entry_to_table(self.dynamodb_client, mock_batch_message_id, record_count=100)
370-
371-
# Previous invocations have already created and added to the temp ack file
372-
existing_ack_content = generate_sample_existing_ack_content()
373-
374-
self.s3_client.put_object(
375-
Bucket=BucketNames.DESTINATION,
376-
Key=MOCK_MESSAGE_DETAILS.temp_ack_file_key,
377-
Body=StringIO(existing_ack_content).getvalue(),
378-
)
379-
380-
array_of_success_messages = [
381-
{
382-
**BASE_SUCCESS_MESSAGE,
383-
"row_id": f"{mock_batch_message_id}^{i}",
384-
"imms_id": f"imms_{i}",
385-
"local_id": f"local^{i}",
386-
}
387-
for i in range(50, 75)
388-
]
389-
array_of_failure_messages = [
390-
{
391-
**BASE_FAILURE_MESSAGE,
392-
"row_id": f"{mock_batch_message_id}^{i}",
393-
"local_id": f"local^{i}",
394-
"diagnostics": DiagnosticsDictionaries.UNHANDLED_ERROR,
395-
}
396-
for i in range(75, 100)
397-
]
398-
399-
# Include the EoF message in the event
400-
array_of_messages = deepcopy(array_of_success_messages) + deepcopy(array_of_failure_messages)
401-
all_messages_plus_eof = array_of_messages
402-
all_messages_plus_eof.append(MOCK_MESSAGE_DETAILS.eof_message)
403-
test_event = {"Records": [{"body": json.dumps(all_messages_plus_eof)}]}
404-
expected_entry_counts = {
405-
"record_count": "100",
406-
"records_succeeded": "75",
407-
"records_failed": "25",
408-
}
409-
410-
response = lambda_handler(event=test_event, context={})
411-
412-
self.assertEqual(response, EXPECTED_ACK_LAMBDA_RESPONSE_FOR_SUCCESS)
413-
self.assert_audit_entry_status_equals(mock_batch_message_id, "Processed")
414-
self.assert_audit_entry_counts_equal(mock_batch_message_id, expected_entry_counts)
415-
416349
def test_lambda_handler_error_scenarios(self):
417350
"""Test that the lambda handler raises appropriate exceptions for malformed event data."""
418351

0 commit comments

Comments (0)