Skip to content

Commit 33f3189

Browse files
committed
async and task max
1 parent 72b0078 commit 33f3189

File tree

7 files changed

+181
-337
lines changed

7 files changed

+181
-337
lines changed

azure/templates/post-deploy.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -236,7 +236,7 @@ steps:
236236
237237
displayName: e2e serial
238238
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch"
239-
condition: eq(1, 2) # Disable task but make this step visible in the pipeline
239+
# condition: eq(1, 1) # Condition commented out — step now runs unconditionally
240240
241241
- bash: |
242242
pyenv local 3.11
@@ -269,7 +269,7 @@ steps:
269269
270270
displayName: e2e parallel 2
271271
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch"
272-
condition: eq(1, 1) # Disable task but make this step visible in the pipeline
272+
condition: eq(1, 2) # Disable task but make this step visible in the pipeline
273273
274274
- task: PublishTestResults@2
275275
displayName: 'Publish test results'

e2e_batch/test_e2e_batch.py

Lines changed: 165 additions & 36 deletions
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
import asyncio
12
import unittest
23
from utils import (
34
generate_csv,
@@ -6,90 +7,218 @@
67
wait_for_ack_file,
78
check_ack_file_content,
89
validate_row_count,
10+
upload_config_file,
11+
generate_csv_with_ordered_100000_rows,
12+
verify_final_ack_file,
913
delete_file_from_s3
1014
)
11-
from per_test import monitor
1215

1316
from constants import (
1417
SOURCE_BUCKET,
1518
INPUT_PREFIX,
1619
ACK_BUCKET,
20+
PRE_VALIDATION_ERROR,
21+
POST_VALIDATION_ERROR,
1722
DUPLICATE,
23+
FILE_NAME_VAL_ERROR,
1824
environment
1925
)
2026

21-
OFFSET = 0 # Days to offset the recorded date by (can be negative)
2227

23-
24-
class TestE2EBatch(unittest.TestCase):
25-
def setUp(self):
28+
class TestE2EBatch(unittest.IsolatedAsyncioTestCase):
29+
async def asyncSetUp(self):
2630
self.uploaded_files = [] # Tracks uploaded input keys
2731
self.ack_files = [] # Tracks ack keys
2832

29-
def tearDown(self):
30-
# get name of unit test
31-
unit_test_name = self._testMethodName
32-
marker = f"tearDown-{unit_test_name}"
33-
34-
monitor(marker, is_test=False)
33+
async def asyncTearDown(self):
3534
for file_key in self.uploaded_files:
3635
delete_file_from_s3(SOURCE_BUCKET, file_key)
3736
for ack_key in self.ack_files:
3837
delete_file_from_s3(ACK_BUCKET, ack_key)
39-
monitor(marker, is_test=False)
4038

4139
if environment != "ref":
42-
def test_create_success(self):
40+
async def test_create_success(self):
4341
"""Test CREATE scenario."""
44-
monitor("test_create_success")
45-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=OFFSET)
42+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=1)
4643

47-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
44+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
4845
self.uploaded_files.append(key)
4946

50-
ack_key = wait_for_ack_file(None, input_file)
47+
ack_key = await wait_for_ack_file(None, input_file)
5148
self.ack_files.append(ack_key)
5249

5350
validate_row_count(input_file, ack_key)
5451

55-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
52+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
5653
check_ack_file_content(ack_content, "OK", None, "CREATE")
5754

58-
monitor("test_create_success")
59-
60-
def test_duplicate_create(self):
55+
async def test_duplicate_create(self):
6156
"""Test DUPLICATE scenario."""
6257

63-
monitor("test_duplicate_create")
58+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True, offset=2)
6459

65-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=OFFSET, same_id=True)
66-
67-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
60+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
6861
self.uploaded_files.append(key)
6962

70-
ack_key = wait_for_ack_file(None, input_file)
63+
ack_key = await wait_for_ack_file(None, input_file)
7164
self.ack_files.append(ack_key)
7265

7366
validate_row_count(input_file, ack_key)
7467

75-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
68+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
7669
check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
7770

78-
monitor("test_duplicate_create")
79-
80-
def test_update_success(self):
71+
async def test_update_success(self):
8172
"""Test UPDATE scenario."""
82-
monitor("test_update_success")
83-
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE", offset=OFFSET)
73+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE", offset=3)
8474

85-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
75+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
8676
self.uploaded_files.append(key)
8777

88-
ack_key = wait_for_ack_file(None, input_file)
78+
ack_key = await wait_for_ack_file(None, input_file)
8979
self.ack_files.append(ack_key)
9080

9181
validate_row_count(input_file, ack_key)
9282

93-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
83+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
9484
check_ack_file_content(ack_content, "OK", None, "UPDATE")
95-
monitor("test_update_success")
85+
86+
async def test_reinstated_success(self):
87+
"""Test REINSTATED scenario."""
88+
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED", offset=4)
89+
90+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
91+
self.uploaded_files.append(key)
92+
93+
ack_key = await wait_for_ack_file(None, input_file)
94+
self.ack_files.append(ack_key)
95+
96+
validate_row_count(input_file, ack_key)
97+
98+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
99+
check_ack_file_content(ack_content, "OK", None, "reinstated")
100+
101+
async def test_update_reinstated_success(self):
102+
"""Test UPDATE-REINSTATED scenario."""
103+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED", offset=5)
104+
105+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
106+
self.uploaded_files.append(key)
107+
108+
ack_key = await wait_for_ack_file(None, input_file)
109+
self.ack_files.append(ack_key)
110+
111+
validate_row_count(input_file, ack_key)
112+
113+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
114+
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
115+
116+
async def test_delete_success(self):
117+
"""Test DELETE scenario."""
118+
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE", offset=6)
119+
120+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
121+
self.uploaded_files.append(key)
122+
123+
ack_key = await wait_for_ack_file(None, input_file)
124+
self.ack_files.append(ack_key)
125+
126+
validate_row_count(input_file, ack_key)
127+
128+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
129+
check_ack_file_content(ack_content, "OK", None, "DELETE")
130+
131+
async def test_pre_validation_error(self):
132+
"""Test PRE-VALIDATION error scenario."""
133+
input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE", offset=7)
134+
135+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
136+
self.uploaded_files.append(key)
137+
138+
ack_key = await wait_for_ack_file(None, input_file)
139+
self.ack_files.append(ack_key)
140+
141+
validate_row_count(input_file, ack_key)
142+
143+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
144+
check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None)
145+
146+
async def test_post_validation_error(self):
147+
"""Test POST-VALIDATION error scenario."""
148+
input_file = generate_csv("", "0.3", action_flag="CREATE", offset=8)
149+
150+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
151+
self.uploaded_files.append(key)
152+
153+
ack_key = await wait_for_ack_file(None, input_file)
154+
self.ack_files.append(ack_key)
155+
156+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
157+
check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None)
158+
159+
async def test_file_name_validation_error(self):
160+
"""Test FILE-NAME-VALIDATION error scenario."""
161+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True, offset=9)
162+
163+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
164+
self.uploaded_files.append(key)
165+
166+
ack_key = await wait_for_ack_file(True, input_file)
167+
self.ack_files.append(ack_key)
168+
169+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
170+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
171+
172+
async def test_header_name_validation_error(self):
173+
"""Test HEADER-NAME-VALIDATION error scenario."""
174+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER", offset=10)
175+
176+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
177+
self.uploaded_files.append(key)
178+
179+
ack_key = await wait_for_ack_file(True, input_file)
180+
self.ack_files.append(ack_key)
181+
182+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
183+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
184+
185+
# This test updates the permissions_config.json file from the imms-internal-dev-supplier-config
186+
# S3 bucket shared across multiple environments (PR environments, internal-dev, int, and ref).
187+
# Running this may modify permissions in these environments, causing unintended side effects.
188+
@unittest.skip("Modifies shared S3 permissions configuration")
189+
async def test_invalid_permission(self):
190+
"""Test INVALID-PERMISSION error scenario."""
191+
await upload_config_file("MMR_FULL") # permissions_config.json is updated here
192+
await asyncio.sleep(20)
193+
194+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=11)
195+
196+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
197+
self.uploaded_files.append(key)
198+
199+
ack_key = await wait_for_ack_file(True, input_file)
200+
self.ack_files.append(ack_key)
201+
202+
ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
203+
check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
204+
205+
await upload_config_file("COVID19_FULL")
206+
await asyncio.sleep(20)
207+
208+
else:
209+
async def test_end_to_end_speed_test_with_100000_rows(self):
210+
"""Test end_to_end_speed_test_with_100000_rows scenario with full integration"""
211+
input_file = generate_csv_with_ordered_100000_rows(12)
212+
213+
key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
214+
self.uploaded_files.append(key)
215+
216+
final_ack_key = await wait_for_ack_file(None, input_file, timeout=1800)
217+
self.ack_files.append(final_ack_key)
218+
219+
response = await verify_final_ack_file(final_ack_key)
220+
assert response is True
221+
222+
223+
if __name__ == "__main__":
224+
unittest.main()

e2e_batch/test_e2e_batch1.py

Lines changed: 0 additions & 91 deletions
This file was deleted.

0 commit comments

Comments (0)