|
1 | | -import asyncio |
2 | 1 | import unittest |
3 | 2 | from utils import ( |
4 | 3 | generate_csv, |
|
7 | 6 | wait_for_ack_file, |
8 | 7 | check_ack_file_content, |
9 | 8 | validate_row_count, |
10 | | - upload_config_file, |
11 | | - generate_csv_with_ordered_100000_rows, |
12 | | - verify_final_ack_file, |
13 | 9 | delete_file_from_s3 |
14 | 10 | ) |
15 | 11 |
|
16 | 12 | from constants import ( |
17 | 13 | SOURCE_BUCKET, |
18 | 14 | INPUT_PREFIX, |
19 | 15 | ACK_BUCKET, |
20 | | - PRE_VALIDATION_ERROR, |
21 | | - POST_VALIDATION_ERROR, |
22 | 16 | DUPLICATE, |
23 | | - FILE_NAME_VAL_ERROR, |
24 | 17 | environment |
25 | 18 | ) |
26 | 19 |
|
27 | 20 |
|
class TestE2EBatch(unittest.TestCase):
    """End-to-end batch tests: upload a generated CSV to S3 and verify the ack file.

    Each test follows the same pipeline: generate an input CSV, upload it to the
    source bucket, wait for the corresponding ack file, validate the ack row
    count against the input, then assert on the ack file's content. That shared
    pipeline lives in :meth:`_run_scenario`.
    """

    def setUp(self):
        """Initialise per-test key registries so tearDown can clean up S3."""
        self.uploaded_files = []  # Tracks uploaded input keys
        self.ack_files = []  # Tracks ack keys

    def tearDown(self):
        """Delete every input and ack object this test created in S3."""
        for file_key in self.uploaded_files:
            delete_file_from_s3(SOURCE_BUCKET, file_key)
        for ack_key in self.ack_files:
            delete_file_from_s3(ACK_BUCKET, ack_key)

    def _run_scenario(self, input_file, expected_result, expected_error, expected_action):
        """Run the shared upload/wait/validate/check pipeline for one scenario.

        Args:
            input_file: Name of the generated input CSV.
            expected_result: Expected ack status (e.g. "OK", "Fatal Error").
            expected_error: Expected error code in the ack, or None for success.
            expected_action: Expected action flag echoed in the ack, or None.

        Uploaded input and ack keys are registered for tearDown cleanup.
        """
        key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
        self.uploaded_files.append(key)

        ack_key = wait_for_ack_file(None, input_file)
        self.ack_files.append(ack_key)

        # Ack must contain one row per input row before content is inspected.
        validate_row_count(input_file, ack_key)

        ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
        check_ack_file_content(ack_content, expected_result, expected_error, expected_action)

    # NOTE(review): these tests are defined only outside the "ref" environment;
    # with the `else:` branch removed, the class has no tests in "ref" — confirm
    # that is intended.
    if environment != "ref":
        def test_create_success(self):
            """Test CREATE scenario."""
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=1,
                                      vax_type="COVID19", ods="8HA94")
            self._run_scenario(input_file, "OK", None, "CREATE")

        def test_duplicate_create(self):
            """Test DUPLICATE scenario."""
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True, offset=2,
                                      vax_type="COVID19", ods="8HA94")
            self._run_scenario(input_file, "Fatal Error", DUPLICATE, "CREATE")

        def test_update_success(self):
            """Test UPDATE scenario."""
            input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE",
                                      offset=3, vax_type="MMR", ods="V0V8L")
            self._run_scenario(input_file, "OK", None, "UPDATE")

        def test_reinstated_success(self):
            """Test REINSTATED scenario."""
            input_file = generate_csv("PHYLIS", "0.5",
                                      action_flag="REINSTATED", offset=4,
                                      vax_type="HPV", ods="DPSREDUCED")
            # "reinstated" casing matches what the ack file emits for this flag.
            self._run_scenario(input_file, "OK", None, "reinstated")
0 commit comments