Skip to content

Commit c09798f

Browse files
committed
Serial & Parallel tests
1 parent ac112fc commit c09798f

File tree

7 files changed

+298
-149
lines changed

7 files changed

+298
-149
lines changed

azure/templates/post-deploy.yml

Lines changed: 69 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -193,6 +193,73 @@ steps:
193193
fi
194194
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e"
195195
displayName: Run Full Test Suite
196+
condition: eq(2, 1) # Disable task but make this step visible in the pipeline
197+
198+
- bash: |
199+
pyenv local 3.11
200+
poetry env use 3.11
201+
set -e
202+
if ! [[ "$APIGEE_ENVIRONMENT" == "prod" || "$APIGEE_ENVIRONMENT" == "int" || "$APIGEE_ENVIRONMENT" == *"sandbox" ]]; then
203+
echo "Running E2E batch folder test cases (Serial)"
204+
205+
export AWS_PROFILE="apim-dev"
206+
aws_account_no="$(aws sts get-caller-identity --query Account --output text)"
207+
echo "Using AWS Account: $aws_account_no"
208+
209+
service_name="${FULLY_QUALIFIED_SERVICE_NAME}"
210+
211+
pr_no=$(echo "$service_name" | { grep -oE '[0-9]+$' || true; })
212+
if [ -z "$pr_no" ]; then
213+
workspace="$APIGEE_ENVIRONMENT"
214+
else
215+
workspace="pr-$pr_no"
216+
fi
217+
218+
poetry install --no-root # Install dependencies defined in pyproject.toml
219+
220+
ENVIRONMENT="$workspace" poetry run python -m unittest -v -c
221+
echo "E2E batch folder test cases executed successfully"
222+
else
223+
echo "Skipping E2E batch folder test cases as the environment is prod-int-sandbox"
224+
fi
225+
226+
displayName: e2e serial
227+
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch"
228+
condition: eq(1, 1) # Always-true condition: task is enabled (update/remove when disabling)
229+
230+
- bash: |
231+
pyenv local 3.11
232+
poetry env use 3.11
233+
set -e
234+
if ! [[ "$APIGEE_ENVIRONMENT" == "prod" || "$APIGEE_ENVIRONMENT" == "int" || "$APIGEE_ENVIRONMENT" == *"sandbox" ]]; then
235+
echo "Running E2E batch folder test cases (Parallel)"
236+
237+
export AWS_PROFILE="apim-dev"
238+
aws_account_no="$(aws sts get-caller-identity --query Account --output text)"
239+
echo "Using AWS Account: $aws_account_no"
240+
241+
service_name="${FULLY_QUALIFIED_SERVICE_NAME}"
242+
243+
pr_no=$(echo "$service_name" | { grep -oE '[0-9]+$' || true; })
244+
if [ -z "$pr_no" ]; then
245+
workspace="$APIGEE_ENVIRONMENT"
246+
else
247+
workspace="pr-$pr_no"
248+
fi
249+
250+
poetry install --no-root # Install dependencies defined in pyproject.toml
251+
252+
ENVIRONMENT="$workspace" time poetry run unittest-parallel -v -j 2
253+
254+
echo "E2E batch folder test cases executed successfully"
255+
else
256+
echo "Skipping E2E batch folder test cases as the environment is prod-int-sandbox"
257+
fi
258+
259+
displayName: e2e parallel 2
260+
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch"
261+
condition: eq(1, 1) # Always-true condition: task is enabled (update/remove when disabling)
262+
196263
197264
- bash: |
198265
pyenv local 3.11
@@ -216,17 +283,16 @@ steps:
216283
217284
poetry install --no-root # Install dependencies defined in pyproject.toml
218285
219-
# ENVIRONMENT="$workspace" poetry run python -m unittest -v -c
220286
ENVIRONMENT="$workspace" time poetry run unittest-parallel -v -j 4
221287
222288
echo "E2E batch folder test cases executed successfully"
223289
else
224290
echo "Skipping E2E batch folder test cases as the environment is prod-int-sandbox"
225291
fi
226292
227-
displayName: Run batch test parallel
293+
displayName: e2e parallel 4
228294
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)/$(SERVICE_ARTIFACT_NAME)/e2e_batch"
229-
condition: eq(2, 1) # Disable task but make this step visible in the pipeline
295+
condition: eq(1, 1) # Always-true condition: task is enabled (was eq(2, 1) = disabled)
230296
231297
- task: PublishTestResults@2
232298
displayName: 'Publish test results'

e2e_batch/Makefile

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22

33
APIGEE_ACCESS_TOKEN ?= $(shell export SSO_LOGIN_URL=https://login.apigee.com && eval get_token -u $(APIGEE_USERNAME))
44
AWS_DOMAIN_NAME=https://$(shell make -C ../terraform -s output name=service_domain_name || true)
5-
PARALLEL_WORKERS=2
5+
PARALLEL_WORKERS=4
66

77
print-token:
88
@echo "APIGEE_ACCESS_TOKEN=$(APIGEE_ACCESS_TOKEN)"
@@ -18,4 +18,4 @@ testp1:
1818
ENVIRONMENT=$(ENVIRONMENT) time poetry run unittest-parallel -v -j $(PARALLEL_WORKERS)
1919

2020
testp2:
21-
ENVIRONMENT=$(ENVIRONMENT) poetry run pytest -n $(PARALLEL_WORKERS) -v
21+
ENVIRONMENT=$(ENVIRONMENT) time poetry run pytest -n $(PARALLEL_WORKERS) -v -s

e2e_batch/test_e2e_batch.py

Lines changed: 5 additions & 54 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,8 @@
1818
environment
1919
)
2020

21+
OFFSET = 0 # Days to offset the recorded date by (can be negative)
22+
2123

2224
class TestE2EBatch(unittest.TestCase):
2325
def setUp(self):
@@ -40,7 +42,7 @@ def tearDown(self):
4042
def test_create_success(self):
4143
"""Test CREATE scenario."""
4244
monitor("test_create_success")
43-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE")
45+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=OFFSET)
4446

4547
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
4648
self.uploaded_files.append(key)
@@ -60,7 +62,7 @@ def test_duplicate_create(self):
6062

6163
monitor("test_duplicate_create")
6264

63-
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", same_id=True)
65+
input_file = generate_csv("PHYLIS", "0.3", action_flag="CREATE", offset=OFFSET, same_id=True)
6466

6567
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
6668
self.uploaded_files.append(key)
@@ -78,7 +80,7 @@ def test_duplicate_create(self):
7880
def test_update_success(self):
7981
"""Test UPDATE scenario."""
8082
monitor("test_update_success")
81-
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE")
83+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE", offset=OFFSET)
8284

8385
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
8486
self.uploaded_files.append(key)
@@ -91,54 +93,3 @@ def test_update_success(self):
9193
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
9294
check_ack_file_content(ack_content, "OK", None, "UPDATE")
9395
monitor("test_update_success")
94-
95-
def test_reinstated_success(self):
96-
"""Test REINSTATED scenario."""
97-
monitor("test_reinstated_success")
98-
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED")
99-
100-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
101-
self.uploaded_files.append(key)
102-
103-
ack_key = wait_for_ack_file(None, input_file)
104-
self.ack_files.append(ack_key)
105-
106-
validate_row_count(input_file, ack_key)
107-
108-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
109-
check_ack_file_content(ack_content, "OK", None, "reinstated")
110-
monitor("test_reinstated_success")
111-
112-
def test_update_reinstated_success(self):
113-
"""Test UPDATE-REINSTATED scenario."""
114-
monitor("test_update_reinstated_success")
115-
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED")
116-
117-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
118-
self.uploaded_files.append(key)
119-
120-
ack_key = wait_for_ack_file(None, input_file)
121-
self.ack_files.append(ack_key)
122-
123-
validate_row_count(input_file, ack_key)
124-
125-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
126-
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
127-
monitor("test_update_reinstated_success")
128-
129-
def test_delete_success(self):
130-
"""Test DELETE scenario."""
131-
monitor("test_delete_success")
132-
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE")
133-
134-
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
135-
self.uploaded_files.append(key)
136-
137-
ack_key = wait_for_ack_file(None, input_file)
138-
self.ack_files.append(ack_key)
139-
140-
validate_row_count(input_file, ack_key)
141-
142-
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
143-
check_ack_file_content(ack_content, "OK", None, "DELETE")
144-
monitor("test_delete_success")

e2e_batch/test_e2e_batch1.py

Lines changed: 91 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,91 @@
1+
import unittest
2+
from utils import (
3+
generate_csv,
4+
upload_file_to_s3,
5+
get_file_content_from_s3,
6+
wait_for_ack_file,
7+
check_ack_file_content,
8+
validate_row_count,
9+
delete_file_from_s3
10+
)
11+
from per_test import monitor
12+
13+
from constants import (
14+
SOURCE_BUCKET,
15+
INPUT_PREFIX,
16+
ACK_BUCKET,
17+
environment
18+
)
19+
20+
OFFSET = 1 # Days to offset the recorded date by (can be negative)
21+
22+
23+
class TestE2EBatch(unittest.TestCase):
24+
def setUp(self):
25+
self.uploaded_files = [] # Tracks uploaded input keys
26+
self.ack_files = [] # Tracks ack keys
27+
28+
def tearDown(self):
29+
# get name of unit test
30+
unit_test_name = self._testMethodName
31+
marker = f"tearDown-{unit_test_name}"
32+
33+
monitor(marker, is_test=False)
34+
for file_key in self.uploaded_files:
35+
delete_file_from_s3(SOURCE_BUCKET, file_key)
36+
for ack_key in self.ack_files:
37+
delete_file_from_s3(ACK_BUCKET, ack_key)
38+
monitor(marker, is_test=False)
39+
40+
if environment != "ref":
41+
42+
def test_reinstated_success(self):
43+
"""Test REINSTATED scenario."""
44+
monitor("test_reinstated_success")
45+
input_file = generate_csv("PHYLIS", "0.5", action_flag="REINSTATED", offset=OFFSET)
46+
47+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
48+
self.uploaded_files.append(key)
49+
50+
ack_key = wait_for_ack_file(None, input_file)
51+
self.ack_files.append(ack_key)
52+
53+
validate_row_count(input_file, ack_key)
54+
55+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
56+
check_ack_file_content(ack_content, "OK", None, "reinstated")
57+
monitor("test_reinstated_success")
58+
59+
def test_update_reinstated_success(self):
60+
"""Test UPDATE-REINSTATED scenario."""
61+
monitor("test_update_reinstated_success")
62+
input_file = generate_csv("PHYLIS", "0.5", action_flag="UPDATE-REINSTATED", offset=OFFSET)
63+
64+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
65+
self.uploaded_files.append(key)
66+
67+
ack_key = wait_for_ack_file(None, input_file)
68+
self.ack_files.append(ack_key)
69+
70+
validate_row_count(input_file, ack_key)
71+
72+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
73+
check_ack_file_content(ack_content, "OK", None, "update-reinstated")
74+
monitor("test_update_reinstated_success")
75+
76+
def test_delete_success(self):
77+
"""Test DELETE scenario."""
78+
monitor("test_delete_success")
79+
input_file = generate_csv("PHYLIS", "0.8", action_flag="DELETE", offset=OFFSET)
80+
81+
key = upload_file_to_s3(input_file, SOURCE_BUCKET, INPUT_PREFIX)
82+
self.uploaded_files.append(key)
83+
84+
ack_key = wait_for_ack_file(None, input_file)
85+
self.ack_files.append(ack_key)
86+
87+
validate_row_count(input_file, ack_key)
88+
89+
ack_content = get_file_content_from_s3(ACK_BUCKET, ack_key)
90+
check_ack_file_content(ack_content, "OK", None, "DELETE")
91+
monitor("test_delete_success")

0 commit comments

Comments
 (0)