
Commit fe0bf2b
multifile
committed
1 parent 1d35d26 commit fe0bf2b

8 files changed: +285 -219 lines changed

e2e_batch/Makefile

Lines changed: 1 addition & 1 deletion

@@ -2,7 +2,7 @@
 
 APIGEE_ACCESS_TOKEN ?= $(shell export SSO_LOGIN_URL=https://login.apigee.com && eval get_token -u $(APIGEE_USERNAME))
 AWS_DOMAIN_NAME=https://$(shell make -C ../terraform -s output name=service_domain_name || true)
-PARALLEL_WORKERS=1
+PARALLEL_WORKERS=4
 
 print-token:
 	@echo "APIGEE_ACCESS_TOKEN=$(APIGEE_ACCESS_TOKEN)"

e2e_batch/constants.py

Lines changed: 0 additions & 4 deletions

@@ -17,10 +17,6 @@
 CONFIG_BUCKET = "imms-internal-dev-supplier-config"
 PERMISSIONS_CONFIG_FILE_KEY = "permissions_config.json"
 
-print(f"SAW. Debug {SOURCE_BUCKET}")
-print(f"SAW. Debug {ACK_BUCKET}")
-print(f"SAW. Debug {environment}")
-
 
 def create_row(unique_id, fore_name, dose_amount, action_flag, header):
     """Helper function to create a single row with the specified UNIQUE_ID and ACTION_FLAG."""

e2e_batch/e2e_batch_base.py

Lines changed: 23 additions & 0 deletions

@@ -0,0 +1,23 @@
+import unittest
+
+from utils import (
+    delete_file_from_s3
+)
+
+from constants import (
+    SOURCE_BUCKET,
+    ACK_BUCKET
+)
+
+
+class TestE2EBatchBase(unittest.TestCase):
+
+    def setUp(self):
+        self.uploaded_files = []  # Tracks uploaded input keys
+        self.ack_files = []  # Tracks ack keys
+
+    def tearDown(self):
+        for file_key in self.uploaded_files:
+            delete_file_from_s3(SOURCE_BUCKET, file_key)
+        for ack_key in self.ack_files:
+            delete_file_from_s3(ACK_BUCKET, ack_key)
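
The new base class centralises S3 clean-up: a test appends the keys it creates, and tearDown deletes them from the source and ack buckets even if an assertion fails first. A minimal usage sketch, assuming generate_csv, upload_file_to_s3 and wait_for_ack_file are exported by utils, as the imports in test_e2e_batch.py below suggest:

# Usage sketch only; mirrors the pattern used in test_e2e_batch.py.
import unittest

from e2e_batch_base import TestE2EBatchBase
from utils import generate_csv, upload_file_to_s3, wait_for_ack_file
from constants import SOURCE_BUCKET, INPUT_PREFIX


class TestExample(TestE2EBatchBase):

    def test_upload_and_ack(self):
        input_file = generate_csv("PHYLIS", "0.3", "CREATE", "RSV", "YGM41")

        # Track the uploaded key so TestE2EBatchBase.tearDown removes it.
        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
        self.uploaded_files.append(key)

        # Track the ack key so tearDown cleans up the ack bucket too.
        ack_key = wait_for_ack_file(None, input_file)
        self.ack_files.append(ack_key)


if __name__ == "__main__":
    unittest.main()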

e2e_batch/test_e2e_batch.py

Lines changed: 65 additions & 211 deletions

@@ -1,4 +1,3 @@
-import time
 import unittest
 
 from utils import (
@@ -8,253 +7,108 @@
     wait_for_ack_file,
     check_ack_file_content,
     validate_row_count,
-    upload_config_file,
-    generate_csv_with_ordered_100000_rows,
-    verify_final_ack_file,
-    delete_file_from_s3
 )
 from per_test import monitor
 
 from constants import (
     SOURCE_BUCKET,
     INPUT_PREFIX,
     ACK_BUCKET,
-    PRE_VALIDATION_ERROR,
-    POST_VALIDATION_ERROR,
     DUPLICATE,
-    FILE_NAME_VAL_ERROR,
     environment
 )
-# load dotenv for run from desktop
-# from dotenv import load_dotenv
-# load_dotenv()
+from e2e_batch_base import TestE2EBatchBase
 
 
-class TestE2EBatch(unittest.TestCase):
+@unittest.skipIf(environment == "ref", "if ref")
+class TestE2EBatch(TestE2EBatchBase):
 
-    def setUp(self):
-        self.uploaded_files = []  # Tracks uploaded input keys
-        self.ack_files = []  # Tracks ack keys
+    def test_duplicate_create(self):
+        """Test DUPLICATE scenario."""
 
-    def tearDown(self):
-        for file_key in self.uploaded_files:
-            delete_file_from_s3(SOURCE_BUCKET, file_key)
-        for ack_key in self.ack_files:
-            delete_file_from_s3(ACK_BUCKET, ack_key)
+        monitor("test_duplicate_create")
 
-    if environment != "ref":
-        def test_create_success(self):
-            """Test CREATE scenario."""
-            monitor("test_create_success")
-            input_file = generate_csv("PHYLIS", "0.3", "CREATE",
-                                      "RSV", "YGM41")
+        input_file = generate_csv("PHYLIS", "0.3", "CREATE",
+                                  "RSV", "YGM41",
+                                  same_id=True)
 
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
+        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
+        self.uploaded_files.append(key)
 
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
+        ack_key = wait_for_ack_file(None, input_file)
+        self.ack_files.append(ack_key)
 
-            validate_row_count(input_file, ack_key)
+        validate_row_count(input_file, ack_key)
 
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "OK", None, "CREATE")
+        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
+        check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
 
-            monitor("test_create_success")
+        monitor("test_duplicate_create")
 
-        def test_duplicate_create(self):
-            """Test DUPLICATE scenario."""
+    def test_update_success(self):
+        """Test UPDATE scenario."""
+        monitor("test_update_success")
+        input_file = generate_csv("PHYLIS", "0.5", "UPDATE", "RSV", "YGM41")
 
-            monitor("test_duplicate_create")
+        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
+        self.uploaded_files.append(key)
 
-            input_file = generate_csv("PHYLIS", "0.3", "CREATE",
-                                      "RSV", "YGM41",
-                                      same_id=True)
+        ack_key = wait_for_ack_file(None, input_file)
+        self.ack_files.append(ack_key)
 
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
+        validate_row_count(input_file, ack_key)
 
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
+        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
+        check_ack_file_content(ack_content, "OK", None, "UPDATE")
+        monitor("test_update_success")
 
-            validate_row_count(input_file, ack_key)
+    def test_reinstated_success(self):
+        """Test REINSTATED scenario."""
+        monitor("test_reinstated_success")
+        input_file = generate_csv("PHYLIS", "0.5", "REINSTATED", "RSV", "YGM41")
 
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Fatal Error", DUPLICATE, "CREATE")
+        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
+        self.uploaded_files.append(key)
 
-            monitor("test_duplicate_create")
+        ack_key = wait_for_ack_file(None, input_file)
+        self.ack_files.append(ack_key)
 
-        def test_update_success(self):
-            """Test UPDATE scenario."""
-            monitor("test_update_success")
-            input_file = generate_csv("PHYLIS", "0.5", "UPDATE", "RSV", "YGM41")
+        validate_row_count(input_file, ack_key)
 
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
+        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
+        check_ack_file_content(ack_content, "OK", None, "reinstated")
+        monitor("test_reinstated_success")
 
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
+    def test_update_reinstated_success(self):
+        """Test UPDATE-REINSTATED scenario."""
+        monitor("test_update_reinstated_success")
+        input_file = generate_csv("PHYLIS", "0.5", "UPDATE-REINSTATED", "RSV", "YGM41")
 
-            validate_row_count(input_file, ack_key)
+        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
+        self.uploaded_files.append(key)
 
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "OK", None, "UPDATE")
-            monitor("test_update_success")
+        ack_key = wait_for_ack_file(None, input_file)
+        self.ack_files.append(ack_key)
 
-        def test_reinstated_success(self):
-            """Test REINSTATED scenario."""
-            monitor("test_reinstated_success")
-            input_file = generate_csv("PHYLIS", "0.5", "REINSTATED", "RSV", "YGM41")
+        validate_row_count(input_file, ack_key)
 
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
+        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
+        check_ack_file_content(ack_content, "OK", None, "update-reinstated")
+        monitor("test_update_reinstated_success")
 
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
+    def test_delete_success(self):
+        """Test DELETE scenario."""
+        monitor("test_delete_success")
+        input_file = generate_csv("PHYLIS", "0.8", "DELETE", "RSV", "YGM41")
 
-            validate_row_count(input_file, ack_key)
+        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
+        self.uploaded_files.append(key)
 
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "OK", None, "reinstated")
-            monitor("test_reinstated_success")
+        ack_key = wait_for_ack_file(None, input_file)
+        self.ack_files.append(ack_key)
 
-        def test_update_reinstated_success(self):
-            """Test UPDATE-REINSTATED scenario."""
-            monitor("test_update_reinstated_success")
-            input_file = generate_csv("PHYLIS", "0.5", "UPDATE-REINSTATED", "RSV", "YGM41")
+        validate_row_count(input_file, ack_key)
 
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
-
-            validate_row_count(input_file, ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "OK", None, "update-reinstated")
-            monitor("test_update_reinstated_success")
-
-        def test_delete_success(self):
-            """Test DELETE scenario."""
-            monitor("test_delete_success")
-            input_file = generate_csv("PHYLIS", "0.8", "DELETE", "RSV", "YGM41")
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
-
-            validate_row_count(input_file, ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "OK", None, "DELETE")
-            monitor("test_delete_success")
-
-        def test_pre_validation_error(self):
-            """Test PRE-VALIDATION error scenario."""
-            monitor("test_pre_validation_error")
-            input_file = generate_csv("PHYLIS", "TRUE", "CREATE", "RSV", "YGM41")
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
-
-            validate_row_count(input_file, ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Fatal Error", PRE_VALIDATION_ERROR, None)
-            monitor("test_pre_validation_error")
-
-        def test_post_validation_error(self):
-            """Test POST-VALIDATION error scenario."""
-            monitor("test_post_validation_error")
-            input_file = generate_csv("", "0.3", "CREATE", "RSV", "YGM41")
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(None, input_file)
-            self.ack_files.append(ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Fatal Error", POST_VALIDATION_ERROR, None)
-            monitor("test_post_validation_error")
-
-        def test_file_name_validation_error(self):
-            """Test FILE-NAME-VALIDATION error scenario."""
-            monitor("test_file_name_validation_error")
-            input_file = generate_csv("PHYLIS", "0.3", "CREATE", "RSV", "YGM41", file_key=True)
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(True, input_file)
-            self.ack_files.append(ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
-            monitor("test_file_name_validation_error")
-
-        def test_header_name_validation_error(self):
-            """Test HEADER-NAME-VALIDATION error scenario."""
-            monitor("test_header_name_validation_error")
-            input_file = generate_csv("PHYLIS", "0.3", "CREATE", "RSV", "YGM41", headers="NH_NUMBER")
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(True, input_file)
-            self.ack_files.append(ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
-            monitor("test_header_name_validation_error")
-
-        # This test updates the permissions_config.json file from the imms-internal-dev-supplier-config
-        # S3 bucket shared across multiple environments (PR environments, internal-dev, int, and ref).
-        # Running this may modify permissions in these environments, causing unintended side effects.
-        @unittest.skip("Modifies shared S3 permissions configuration")
-        def test_invalid_permission(self):
-            """Test INVALID-PERMISSION error scenario."""
-            monitor("test_invalid_permission")
-            upload_config_file("MMR_FULL")  # permissions_config.json is updated here
-            time.sleep(20)
-
-            input_file = generate_csv("PHYLIS", "0.3", "CREATE", "RSV", "YGM41")
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            ack_key = wait_for_ack_file(True, input_file)
-            self.ack_files.append(ack_key)
-
-            ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
-            check_ack_file_content(ack_content, "Failure", FILE_NAME_VAL_ERROR, None)
-
-            upload_config_file("COVID19_FULL")
-            time.sleep(20)
-            monitor("test_invalid_permission")
-
-    else:
-        def test_end_to_end_speed_test_with_100000_rows(self):
-            monitor("test_end_to_end_speed_test_with_100000_rows")
-            """Test end_to_end_speed_test_with_100000_rows scenario with full integration"""
-            input_file = generate_csv_with_ordered_100000_rows(None)
-
-            key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
-            self.uploaded_files.append(key)
-
-            final_ack_key = wait_for_ack_file(None, input_file, timeout=1800)
-            self.ack_files.append(final_ack_key)
-
-            response = verify_final_ack_file(final_ack_key)
-            assert response is True
-            monitor("test_end_to_end_speed_test_with_100000_rows")
-
-
-if __name__ == "__main__":
-    unittest.main()
+        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
+        check_ack_file_content(ack_content, "OK", None, "DELETE")
+        monitor("test_delete_success")

0 commit comments