Skip to content

Commit 670b87d

Browse files
authored
Merge pull request #351 from NHSDigital/AMB-2347
AMB-2347 : Immunisation Batch End to End Automation Testing REF
2 parents 7300329 + 13d1436 commit 670b87d

File tree

6 files changed

+276
-132
lines changed

6 files changed

+276
-132
lines changed

azure/templates/post-deploy.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -179,7 +179,7 @@ steps:
179179
180180
- bash: |
181181
set -e
182-
if ! [[ "$APIGEE_ENVIRONMENT" == "ref" || "$APIGEE_ENVIRONMENT" == "prod" || "$APIGEE_ENVIRONMENT" == "int" || "$APIGEE_ENVIRONMENT" == *"sandbox" ]]; then
182+
if ! [[ "$APIGEE_ENVIRONMENT" == "prod" || "$APIGEE_ENVIRONMENT" == "int" || "$APIGEE_ENVIRONMENT" == *"sandbox" ]]; then
183183
echo "Running E2E batch folder test cases"
184184
185185
export AWS_PROFILE="apim-dev"
@@ -201,7 +201,7 @@ steps:
201201

202202
echo "E2E batch folder test cases executed successfully"
203203
else
204-
echo "Skipping E2E batch folder test cases as the environment is ref-prod-int-sandbox"
204+
echo "Skipping E2E batch folder test cases as the environment is prod-int-sandbox"
205205
fi
206206

207207
displayName: Run full batch test suite

e2e/extract_failed_tests.py

Lines changed: 0 additions & 22 deletions
This file was deleted.

e2e_batch/clear_dynamodb.py

Lines changed: 72 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,72 @@
import os

import boto3

# Name of the DynamoDB table to clear. Overridable via the DYNAMODB_TABLE_NAME
# environment variable; defaults to the internal-dev events table.
# (The original hard-coded the name, which made the empty-check below dead code
# and its error message false.)
TABLE_NAME = os.environ.get("DYNAMODB_TABLE_NAME", "imms-internal-dev-imms-events")

if not TABLE_NAME:
    raise ValueError("DYNAMODB_TABLE_NAME environment variable is not set")

# Initialize DynamoDB resource and bind the target table.
dynamodb = boto3.resource("dynamodb")
table = dynamodb.Table(TABLE_NAME)
def get_primary_keys():
    """Return the attribute names making up the table's primary key schema."""
    return [element["AttributeName"] for element in table.key_schema]
def get_total_count():
    """Count every item in the table, following LastEvaluatedKey pagination.

    Uses Select="COUNT" so only counts (not item data) cross the wire.
    """
    total = 0
    scan_kwargs = {"Select": "COUNT"}
    while True:
        page = table.scan(**scan_kwargs)
        total += page.get("Count", 0)
        next_key = page.get("LastEvaluatedKey")
        if not next_key:
            return total
        # Resume the scan from where the previous page stopped.
        scan_kwargs["ExclusiveStartKey"] = next_key
def clear_dynamodb():
    """Delete every item from the DynamoDB table.

    Re-scans the table after each pass of deletions until a scan returns no
    items, so pagination is handled implicitly by the shrinking table.
    """
    print(f"Clearing DynamoDB table: {TABLE_NAME}")

    key_names = get_primary_keys()
    if not key_names:
        raise ValueError("Unable to retrieve primary key schema")

    total_count = get_total_count()
    print(f"Total items before deletion: {total_count}")

    removed = 0
    while True:
        page = table.scan()
        batch_items = page.get("Items", [])
        if not batch_items:
            break

        # batch_writer buffers delete requests and flushes them for us.
        with table.batch_writer() as writer:
            for record in batch_items:
                writer.delete_item(Key={name: record[name] for name in key_names})
                removed += 1

        print(f"Deleted {len(batch_items)} items...")

    print(f"Total {removed} items deleted from DynamoDB")
# Script entry point: allows the cleanup to be invoked directly, e.g. from the
# e2e batch pipeline, without importing this module elsewhere.
if __name__ == "__main__":
    clear_dynamodb()

e2e_batch/constants.py

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,6 @@
11
import os
2-
# import logging
2+
33
from datetime import datetime, timezone
4-
# logging.basicConfig(level="INFO")
5-
# logger = logging.getLogger()
6-
# logger.setLevel("INFO")
74
from clients import logger
85

96
env_value = os.environ.get("ENV", "internal-dev")
@@ -16,6 +13,7 @@
1613
POST_VALIDATION_ERROR = "Validation errors: contained[?(@.resourceType=='Patient')].name[0].given is a mandatory field"
1714
DUPLICATE = "The provided identifier:"
1815
ACK_PREFIX = "ack/"
16+
HEADER_RESPONSE_CODE_COLUMN = "HEADER_RESPONSE_CODE"
1917
FILE_NAME_VAL_ERROR = "Infrastructure Level Response Value - Processing Error"
2018
CONFIG_BUCKET = "imms-internal-dev-supplier-config"
2119
PERMISSIONS_CONFIG_FILE_KEY = "permissions_config.json"

e2e_batch/test_e2e_batch.py

Lines changed: 113 additions & 99 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,9 @@
77
wait_for_ack_file,
88
check_ack_file_content,
99
validate_row_count,
10-
upload_config_file
10+
upload_config_file,
11+
generate_csv_with_ordered_100000_rows,
12+
verify_final_ack_file,
1113
)
1214
from constants import (
1315
SOURCE_BUCKET,
@@ -17,109 +19,121 @@
1719
POST_VALIDATION_ERROR,
1820
DUPLICATE,
1921
FILE_NAME_VAL_ERROR,
22+
env_value,
2023
)
2124

2225

2326
class TestE2EBatch(unittest.TestCase):
27+
if env_value != "ref":
2428

25-
def test_create_success(self):
26-
"""Test CREATE scenario."""
27-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
28-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
29-
ack_key = wait_for_ack_file(None, input_file)
30-
validate_row_count(input_file, ack_key)
31-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
32-
check_ack_file_content(ack_content, "OK", None, "CREATE")
33-
34-
def test_duplicate_create(self):
35-
"""Test DUPLICATE scenario."""
36-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True)
37-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
38-
ack_key = wait_for_ack_file(None, input_file)
39-
validate_row_count(input_file, ack_key)
40-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
41-
check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
42-
43-
def test_update_success(self):
44-
"""Test UPDATE scenario."""
45-
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE")
46-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
47-
ack_key = wait_for_ack_file(None, input_file)
48-
validate_row_count(input_file, ack_key)
49-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
50-
check_ack_file_content(ack_content, "OK", None, "UPDATE")
51-
52-
def test_reinstated_success(self):
53-
"""Test REINSTATED scenario."""
54-
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED")
55-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
56-
ack_key = wait_for_ack_file(None, input_file)
57-
validate_row_count(input_file, ack_key)
58-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
59-
check_ack_file_content(ack_content, "OK", None, "reinstated")
60-
61-
def test_update_reinstated_success(self):
62-
"""Test UPDATE-REINSTATED scenario."""
63-
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED")
64-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
65-
ack_key = wait_for_ack_file(None, input_file)
66-
validate_row_count(input_file, ack_key)
67-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
68-
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
69-
70-
def test_delete_success(self):
71-
"""Test DELETE scenario."""
72-
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE")
73-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
74-
ack_key = wait_for_ack_file(None, input_file)
75-
validate_row_count(input_file, ack_key)
76-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
77-
check_ack_file_content(ack_content, "OK", None, "DELETE")
78-
79-
def test_pre_validation_error(self):
80-
"""Test PRE-VALIDATION error scenario."""
81-
input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE")
82-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
83-
ack_key = wait_for_ack_file(None, input_file)
84-
validate_row_count(input_file, ack_key)
85-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
86-
check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None)
87-
88-
def test_post_validation_error(self):
89-
"""Test POST-VALIDATION error scenario."""
90-
input_file = generate_csv("", "0.3", action_flag="CREATE")
91-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
92-
ack_key = wait_for_ack_file(None, input_file)
93-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
94-
check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None)
95-
96-
def test_file_name_validation_error(self):
97-
"""Test FILE-NAME-VALIDATION error scenario."""
98-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True)
99-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
100-
ack_key = wait_for_ack_file(True, input_file)
101-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
102-
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
103-
104-
def test_header_name_validation_error(self):
105-
"""Test HEADER-NAME-VALIDATION error scenario."""
106-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER")
107-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
108-
ack_key = wait_for_ack_file(True, input_file)
109-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
110-
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
111-
112-
def test_invalid_permission(self):
113-
"""Test INVALID-PERMISSION error scenario."""
114-
upload_config_file("MMR_FULL")
115-
time.sleep(20)
116-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
117-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
118-
ack_key = wait_for_ack_file(True, input_file)
119-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
120-
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
121-
upload_config_file("COVID19_FULL")
122-
time.sleep(20)
29+
def test_create_success(self):
30+
"""Test CREATE scenario."""
31+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
32+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
33+
ack_key = wait_for_ack_file(None, input_file)
34+
validate_row_count(input_file, ack_key)
35+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
36+
check_ack_file_content(ack_content, "OK", None, "CREATE")
37+
38+
def test_duplicate_create(self):
39+
"""Test DUPLICATE scenario."""
40+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True)
41+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
42+
ack_key = wait_for_ack_file(None, input_file)
43+
validate_row_count(input_file, ack_key)
44+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
45+
check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
46+
47+
def test_update_success(self):
48+
"""Test UPDATE scenario."""
49+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE")
50+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
51+
ack_key = wait_for_ack_file(None, input_file)
52+
validate_row_count(input_file, ack_key)
53+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
54+
check_ack_file_content(ack_content, "OK", None, "UPDATE")
55+
56+
def test_reinstated_success(self):
57+
"""Test REINSTATED scenario."""
58+
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED")
59+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
60+
ack_key = wait_for_ack_file(None, input_file)
61+
validate_row_count(input_file, ack_key)
62+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
63+
check_ack_file_content(ack_content, "OK", None, "reinstated")
64+
65+
def test_update_reinstated_success(self):
66+
"""Test UPDATE-REINSTATED scenario."""
67+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED")
68+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
69+
ack_key = wait_for_ack_file(None, input_file)
70+
validate_row_count(input_file, ack_key)
71+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
72+
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
73+
74+
def test_delete_success(self):
75+
"""Test DELETE scenario."""
76+
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE")
77+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
78+
ack_key = wait_for_ack_file(None, input_file)
79+
validate_row_count(input_file, ack_key)
80+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
81+
check_ack_file_content(ack_content, "OK", None, "DELETE")
82+
83+
def test_pre_validation_error(self):
84+
"""Test PRE-VALIDATION error scenario."""
85+
input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE")
86+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
87+
ack_key = wait_for_ack_file(None, input_file)
88+
validate_row_count(input_file, ack_key)
89+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
90+
check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None)
91+
92+
def test_post_validation_error(self):
93+
"""Test POST-VALIDATION error scenario."""
94+
input_file = generate_csv("", "0.3", action_flag="CREATE")
95+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
96+
ack_key = wait_for_ack_file(None, input_file)
97+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
98+
check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None)
99+
100+
def test_file_name_validation_error(self):
101+
"""Test FILE-NAME-VALIDATION error scenario."""
102+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True)
103+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
104+
ack_key = wait_for_ack_file(True, input_file)
105+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
106+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
107+
108+
def test_header_name_validation_error(self):
109+
"""Test HEADER-NAME-VALIDATION error scenario."""
110+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER")
111+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
112+
ack_key = wait_for_ack_file(True, input_file)
113+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
114+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
115+
116+
def test_invalid_permission(self):
117+
"""Test INVALID-PERMISSION error scenario."""
118+
upload_config_file("MMR_FULL")
119+
time.sleep(20)
120+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
121+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
122+
ack_key = wait_for_ack_file(True, input_file)
123+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
124+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
125+
upload_config_file("COVID19_FULL")
126+
time.sleep(20)
127+
128+
else:
129+
130+
def test_end_to_end_speed_test_with_100000_rows(self):
131+
"""Test end_to_end_speed_test_with_100000_rows scenario with full integration"""
132+
input_file = generate_csv_with_ordered_100000_rows(None)
133+
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
134+
final_ack_key = wait_for_ack_file(None, input_file, timeout=1800)
135+
response = verify_final_ack_file(final_ack_key)
136+
assert response is True
123137

124138

125139
if __name__ == "__main__":

0 commit comments

Comments
 (0)