|
| 1 | +import asyncio |
1 | 2 | import unittest |
2 | 3 | from utils import ( |
3 | 4 | generate_csv, |
|
6 | 7 | wait_for_ack_file, |
7 | 8 | check_ack_file_content, |
8 | 9 | validate_row_count, |
| 10 | + upload_config_file, |
| 11 | + generate_csv_with_ordered_100000_rows, |
| 12 | + verify_final_ack_file, |
9 | 13 | delete_file_from_s3 |
10 | 14 | ) |
11 | | -from per_test import monitor |
12 | 15 |
|
13 | 16 | from constants import ( |
14 | 17 | SOURCE_BUCKET, |
15 | 18 | INPUT_PREFIX, |
16 | 19 | ACK_BUCKET, |
| 20 | + PRE_VALIDATION_ERROR, |
| 21 | + POST_VALIDATION_ERROR, |
17 | 22 | DUPLICATE, |
| 23 | + FILE_NAME_VAL_ERROR, |
18 | 24 | environment |
19 | 25 | ) |
20 | 26 |
|
21 | | -OFFSET = 0 # Days to offset the recorded date by (can be negative) |
22 | 27 |
|
23 | | - |
class TestE2EBatch(unittest.IsolatedAsyncioTestCase):
    """End-to-end batch tests: upload a generated CSV to S3 and verify the
    acknowledgement (ack) file that the batch pipeline writes back.

    Every scenario follows the same shape (upload -> wait for ack ->
    optionally validate row counts -> assert ack content), so the shared
    steps live in the private ``_run_scenario`` helper.
    """

    async def asyncSetUp(self):
        """Track every key this test creates so asyncTearDown can clean up."""
        self.uploaded_files = []  # Tracks uploaded input keys
        self.ack_files = []  # Tracks ack keys

    async def asyncTearDown(self):
        """Delete all input and ack files created during the test."""
        for file_key in self.uploaded_files:
            delete_file_from_s3(SOURCE_BUCKET, file_key)
        for ack_key in self.ack_files:
            delete_file_from_s3(ACK_BUCKET, ack_key)

    async def _run_scenario(self, input_file, expected_result, expected_error,
                            operation, *, ack_flag=None, check_rows=True):
        """Upload ``input_file``, wait for its ack file and assert its content.

        Args:
            input_file: Locally generated CSV file name to upload.
            expected_result: Expected status in the ack file
                (e.g. "OK", "Fatal Error", "Failure").
            expected_error: Expected error constant, or None for success cases.
            operation: Expected operation label in the ack file, or None.
            ack_flag: First argument forwarded to ``wait_for_ack_file``
                (None for row-level acks, True for file-level failure acks —
                TODO confirm exact semantics against utils.wait_for_ack_file).
            check_rows: Whether to compare input row count against the ack
                file; skipped for scenarios rejected before row processing.
        """
        key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
        self.uploaded_files.append(key)

        ack_key = await wait_for_ack_file(ack_flag, input_file)
        self.ack_files.append(ack_key)

        if check_rows:
            validate_row_count(input_file, ack_key)

        ack_content = await get_file_content_from_s3(ACK_BUCKET, ack_key)
        check_ack_file_content(ack_content, expected_result, expected_error, operation)

    # Functional scenarios run in every environment except "ref";
    # "ref" only runs the 100k-row speed test (see the else branch).
    if environment != "ref":
        async def test_create_success(self):
            """Test CREATE scenario."""
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=1)
            await self._run_scenario(input_file, "OK", None, "CREATE")

        async def test_duplicate_create(self):
            """Test DUPLICATE scenario."""
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True, offset=2)
            await self._run_scenario(input_file, "Fatal Error", DUPLICATE, "CREATE")

        async def test_update_success(self):
            """Test UPDATE scenario."""
            input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE", offset=3)
            await self._run_scenario(input_file, "OK", None, "UPDATE")

        async def test_reinstated_success(self):
            """Test REINSTATED scenario."""
            input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED", offset=4)
            await self._run_scenario(input_file, "OK", None, "reinstated")

        async def test_update_reinstated_success(self):
            """Test UPDATE-REINSTATED scenario."""
            input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED", offset=5)
            await self._run_scenario(input_file, "OK", None, "update-reinstated")

        async def test_delete_success(self):
            """Test DELETE scenario."""
            input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE", offset=6)
            await self._run_scenario(input_file, "OK", None, "DELETE")

        async def test_pre_validation_error(self):
            """Test PRE-VALIDATION error scenario."""
            # "TRUE" is an invalid dose amount, tripping pre-validation.
            input_file = generate_csv("PHYLIS", "TRUE", action_flag="CREATE", offset=7)
            await self._run_scenario(input_file, "Fatal Error", PRE_VALIDATION_ERROR, None)

        async def test_post_validation_error(self):
            """Test POST-VALIDATION error scenario."""
            # Empty name field passes pre-validation but fails post-validation;
            # row counts are not comparable for this scenario.
            input_file = generate_csv("", "0.3", action_flag="CREATE", offset=8)
            await self._run_scenario(input_file, "Fatal Error", POST_VALIDATION_ERROR, None,
                                     check_rows=False)

        async def test_file_name_validation_error(self):
            """Test FILE-NAME-VALIDATION error scenario."""
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", file_key=True, offset=9)
            await self._run_scenario(input_file, "Failure", FILE_NAME_VAL_ERROR, None,
                                     ack_flag=True, check_rows=False)

        async def test_header_name_validation_error(self):
            """Test HEADER-NAME-VALIDATION error scenario."""
            # NOTE(review): expected error is FILE_NAME_VAL_ERROR, not a
            # header-specific constant — confirm this matches the pipeline.
            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", headers="NH_NUMBER", offset=10)
            await self._run_scenario(input_file, "Failure", FILE_NAME_VAL_ERROR, None,
                                     ack_flag=True, check_rows=False)

        # This test updates the permissions_config.json file from the imms-internal-dev-supplier-config
        # S3 bucket shared across multiple environments (PR environments, internal-dev, int, and ref).
        # Running this may modify permissions in these environments, causing unintended side effects.
        @unittest.skip("Modifies shared S3 permissions configuration")
        async def test_invalid_permission(self):
            """Test INVALID-PERMISSION error scenario."""
            await upload_config_file("MMR_FULL")  # permissions_config.json is updated here
            await asyncio.sleep(20)  # allow the config change to propagate

            input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=11)
            await self._run_scenario(input_file, "Failure", FILE_NAME_VAL_ERROR, None,
                                     ack_flag=True, check_rows=False)

            # Restore the shared permissions config and let it propagate again.
            await upload_config_file("COVID19_FULL")
            await asyncio.sleep(20)

    else:
        async def test_end_to_end_speed_test_with_100000_rows(self):
            """Test end_to_end_speed_test_with_100000_rows scenario with full integration"""
            input_file = generate_csv_with_ordered_100000_rows(12)

            key = await upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
            self.uploaded_files.append(key)

            # 100k rows take far longer than a normal scenario; allow 30 minutes.
            final_ack_key = await wait_for_ack_file(None, input_file, timeout=1800)
            self.ack_files.append(final_ack_key)

            response = await verify_final_ack_file(final_ack_key)
            self.assertTrue(response)


if __name__ == "__main__":
    unittest.main()
0 commit comments