Skip to content

Commit b5d4471

Browse files
VED-245 Batch e2e tests fix (#557)
Co-authored-by: Matt Jarvis <[email protected]>
1 parent 6c4c6a8 commit b5d4471

File tree

10 files changed

+166
-49
lines changed

10 files changed

+166
-49
lines changed

azure/templates/post-deploy.yml

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -116,9 +116,10 @@ steps:
116116
117117
- bash: |
118118
pyenv install -s 3.10.8
119+
pyenv install -s 3.11.11
119120
pyenv global 3.10.8
120121
python --version
121-
displayName: Set Python 3.10
122+
displayName: Install python 3.10 and 3.11
122123
123124
- bash: |
124125
set -e
@@ -175,9 +176,9 @@ steps:
175176
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e"
176177
displayName: Run Full Test Suite
177178
178-
179-
180179
- bash: |
180+
pyenv local 3.11
181+
poetry env use 3.11
181182
set -e
182183
if ! [[ "$APIGEE_ENVIRONMENT" == "prod" || "$APIGEE_ENVIRONMENT" == "int" || "$APIGEE_ENVIRONMENT" == *"sandbox" ]]; then
183184
echo "Running E2E batch folder test cases"
@@ -197,7 +198,7 @@ steps:
197198
198199
poetry install --no-root # Install dependencies defined in pyproject.toml
199200
200-
ENV="$workspace" poetry run python -m unittest -v -c
201+
ENVIRONMENT="$workspace" poetry run python -m unittest -v -c
201202
202203
echo "E2E batch folder test cases executed successfully"
203204
else

e2e_batch/Makefile

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
11
-include .env
22

33
run-immunization-batch:
4-
ENV=$(environment) poetry run python -m unittest -v -c
4+
ENVIRONMENT=$(environment) poetry run python -m unittest -v -c

e2e_batch/README.md

Whitespace-only changes.

e2e_batch/clients.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,16 +3,17 @@
33
"""
44

55
import logging
6+
from constants import (environment, REGION)
67
from boto3 import client as boto3_client, resource as boto3_resource
78

89

910
# AWS Clients and Resources
10-
REGION = "eu-west-2"
11+
1112

1213
s3_client = boto3_client("s3", region_name=REGION)
1314

1415
dynamodb = boto3_resource("dynamodb", region_name=REGION)
15-
table_name = "imms-internal-dev-imms-events"
16+
table_name = f"imms-{environment}-imms-events"
1617
table = dynamodb.Table(table_name)
1718
# Logger
1819
logging.basicConfig(level="INFO")

e2e_batch/constants.py

Lines changed: 4 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,23 +1,12 @@
11
import os
2-
32
from datetime import datetime, timezone
4-
from clients import logger
5-
6-
env_value = os.environ.get("ENV", "internal-dev")
7-
environment_value = os.environ.get("ENVIRONMENT", "")
8-
logger.info("Environment : %s", env_value)
9-
logger.info("Build Env : %s", environment_value)
103

11-
env_internal_dev = environment_value == "internal-dev"
4+
environment = os.environ.get("ENVIRONMENT", "internal-dev")
5+
REGION = "eu-west-2"
126

13-
SOURCE_BUCKET = f"immunisation-batch-{env_value}-data-sources"
7+
SOURCE_BUCKET = f"immunisation-batch-{environment}-data-sources"
148
INPUT_PREFIX = ""
15-
ACK_BUCKET = (
16-
"immunisation-batch-ref-data-destinations"
17-
if env_value == "ref"
18-
else "immunisation-batch-internal-dev-data-destinations"
19-
)
20-
9+
ACK_BUCKET = f"immunisation-batch-{environment}-data-destinations"
2110
FORWARDEDFILE_PREFIX = "forwardedFile/"
2211
PRE_VALIDATION_ERROR = "Validation errors: doseQuantity.value must be a number"
2312
POST_VALIDATION_ERROR = "Validation errors: contained[?(@.resourceType=='Patient')].name[0].given is a mandatory field"

e2e_batch/poetry.lock

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

e2e_batch/pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,6 @@ license = "MIT"
77
readme = "README.md"
88

99
[tool.poetry.dependencies]
10-
python = "~3.10"
10+
python = "~3.11"
1111
boto3 = "~1.38.35"
12-
pandas = "^2.3.0"
12+
pandas = "^2.3.0"

e2e_batch/test_e2e_batch.py

Lines changed: 100 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,9 @@
1010
upload_config_file,
1111
generate_csv_with_ordered_100000_rows,
1212
verify_final_ack_file,
13+
delete_file_from_s3
1314
)
15+
1416
from constants import (
1517
SOURCE_BUCKET,
1618
INPUT_PREFIX,
@@ -19,119 +21,201 @@
1921
POST_VALIDATION_ERROR,
2022
DUPLICATE,
2123
FILE_NAME_VAL_ERROR,
22-
env_value
24+
environment
2325
)
2426

2527

2628
class TestE2EBatch(unittest.TestCase):
27-
if env_value != "ref":
29+
def setUp(self):
30+
self.uploaded_files = [] # Tracks uploaded input keys
31+
self.ack_files = [] # Tracks ack keys
32+
33+
def tearDown(self):
34+
for file_key in self.uploaded_files:
35+
delete_file_from_s3(SOURCE_BUCKET, file_key)
36+
for ack_key in self.ack_files:
37+
delete_file_from_s3(ACK_BUCKET, ack_key)
2838

39+
if environment != "ref":
2940
def test_create_success(self):
3041
"""Test CREATE scenario."""
3142
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
32-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
43+
44+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
45+
self.uploaded_files.append(key)
46+
3347
ack_key = wait_for_ack_file(None, input_file)
48+
self.ack_files.append(ack_key)
49+
3450
validate_row_count(input_file, ack_key)
51+
3552
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
3653
check_ack_file_content(ack_content, "OK", None, "CREATE")
3754

3855
def test_duplicate_create(self):
3956
"""Test DUPLICATE scenario."""
57+
4058
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True)
41-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
59+
60+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
61+
self.uploaded_files.append(key)
62+
4263
ack_key = wait_for_ack_file(None, input_file)
64+
self.ack_files.append(ack_key)
65+
4366
validate_row_count(input_file, ack_key)
67+
4468
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
4569
check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
4670

4771
def test_update_success(self):
4872
"""Test UPDATE scenario."""
4973
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE")
50-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
74+
75+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
76+
self.uploaded_files.append(key)
77+
5178
ack_key = wait_for_ack_file(None, input_file)
79+
self.ack_files.append(ack_key)
80+
5281
validate_row_count(input_file, ack_key)
82+
5383
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
5484
check_ack_file_content(ack_content, "OK", None, "UPDATE")
5585

5686
def test_reinstated_success(self):
5787
"""Test REINSTATED scenario."""
5888
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED")
59-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
89+
90+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
91+
self.uploaded_files.append(key)
92+
6093
ack_key = wait_for_ack_file(None, input_file)
94+
self.ack_files.append(ack_key)
95+
6196
validate_row_count(input_file, ack_key)
97+
6298
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
6399
check_ack_file_content(ack_content, "OK", None, "reinstated")
64100

65101
def test_update_reinstated_success(self):
66102
"""Test UPDATE-REINSTATED scenario."""
67103
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED")
68-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
104+
105+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
106+
self.uploaded_files.append(key)
107+
69108
ack_key = wait_for_ack_file(None, input_file)
109+
self.ack_files.append(ack_key)
110+
70111
validate_row_count(input_file, ack_key)
112+
71113
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
72114
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
73115

74116
def test_delete_success(self):
75117
"""Test DELETE scenario."""
76118
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE")
77-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
119+
120+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
121+
self.uploaded_files.append(key)
122+
78123
ack_key = wait_for_ack_file(None, input_file)
124+
self.ack_files.append(ack_key)
125+
79126
validate_row_count(input_file, ack_key)
127+
80128
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
81129
check_ack_file_content(ack_content, "OK", None, "DELETE")
82130

83131
def test_pre_validation_error(self):
84132
"""Test PRE-VALIDATION error scenario."""
85133
input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE")
86-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
134+
135+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
136+
self.uploaded_files.append(key)
137+
87138
ack_key = wait_for_ack_file(None, input_file)
139+
self.ack_files.append(ack_key)
140+
88141
validate_row_count(input_file, ack_key)
142+
89143
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
90144
check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None)
91145

92146
def test_post_validation_error(self):
93147
"""Test POST-VALIDATION error scenario."""
94148
input_file = generate_csv("", "0.3", action_flag="CREATE")
95-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
149+
150+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
151+
self.uploaded_files.append(key)
152+
96153
ack_key = wait_for_ack_file(None, input_file)
154+
self.ack_files.append(ack_key)
155+
97156
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
98157
check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None)
99158

100159
def test_file_name_validation_error(self):
101160
"""Test FILE-NAME-VALIDATION error scenario."""
102161
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True)
103-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
162+
163+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
164+
self.uploaded_files.append(key)
165+
104166
ack_key = wait_for_ack_file(True, input_file)
167+
self.ack_files.append(ack_key)
168+
105169
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
106170
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
107171

108172
def test_header_name_validation_error(self):
109173
"""Test HEADER-NAME-VALIDATION error scenario."""
110174
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER")
111-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
175+
176+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
177+
self.uploaded_files.append(key)
178+
112179
ack_key = wait_for_ack_file(True, input_file)
180+
self.ack_files.append(ack_key)
181+
113182
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
114183
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
115184

185+
# This test updates the permissions_config.json file from the imms-internal-dev-supplier-config
186+
# S3 bucket shared across multiple environments (PR environments, internal-dev, int, and ref).
187+
# Running this may modify permissions in these environments, causing unintended side effects.
188+
@unittest.skip("Modifies shared S3 permissions configuration")
116189
def test_invalid_permission(self):
117190
"""Test INVALID-PERMISSION error scenario."""
118-
upload_config_file("MMR_FULL")
191+
upload_config_file("MMR_FULL") # permissions_config.json is updated here
119192
time.sleep(20)
193+
120194
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
121-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
195+
196+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
197+
self.uploaded_files.append(key)
198+
122199
ack_key = wait_for_ack_file(True, input_file)
200+
self.ack_files.append(ack_key)
201+
123202
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
124203
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
204+
125205
upload_config_file("COVID19_FULL")
126206
time.sleep(20)
127207

128208
else:
129-
130209
def test_end_to_end_speed_test_with_100000_rows(self):
131210
"""Test end_to_end_speed_test_with_100000_rows scenario with full integration"""
132211
input_file = generate_csv_with_ordered_100000_rows(None)
133-
upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
212+
213+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
214+
self.uploaded_files.append(key)
215+
134216
final_ack_key = wait_for_ack_file(None, input_file, timeout=1800)
217+
self.ack_files.append(final_ack_key)
218+
135219
response = verify_final_ack_file(final_ack_key)
136220
assert response is True
137221

0 commit comments

Comments (0)