Skip to content

Commit f19134a

Browse files
Merge pull request #668 from NHSDigital/feature/made14-NRL-760-deploy-bluegreen
[NRL-760] Deployment changes for blue/green arch
2 parents f6990e9 + ec7e9d3 commit f19134a

File tree

23 files changed

+722
-136
lines changed

23 files changed

+722
-136
lines changed

.github/workflows/persistent-environment.yml

Lines changed: 154 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -117,11 +117,20 @@ jobs:
117117
name: build-artifacts
118118
path: dist
119119

120+
- name: Install zip
121+
run: sudo apt-get install zip
122+
123+
- name: Setup Python environment
124+
run: |
125+
poetry install --no-root
126+
source $(poetry env info --path)/bin/activate
127+
120128
- name: Terraform Init
121129
run: |
130+
inactive_stack=$(poetry run python ./scripts/get_env_config.py inactive-stack ${{ inputs.environment }})
122131
terraform -chdir=terraform/infrastructure init
123-
terraform -chdir=terraform/infrastructure workspace new ${{ inputs.environment }} || \
124-
terraform -chdir=terraform/infrastructure workspace select ${{ inputs.environment }}
132+
terraform -chdir=terraform/infrastructure workspace new ${inactive_stack} || \
133+
terraform -chdir=terraform/infrastructure workspace select ${inactive_stack}
125134
126135
- name: Terraform Plan
127136
run: |
@@ -180,11 +189,152 @@ jobs:
180189
account=$(echo '${{ inputs.environment }}' | cut -d '-' -f1)
181190
make truststore-pull-server ENV=${account}
182191
192+
- name: Install zip
193+
run: sudo apt-get install zip
194+
195+
- name: Setup Python environment
196+
run: |
197+
poetry install --no-root
198+
source $(poetry env info --path)/bin/activate
199+
183200
- name: Terraform Init
184201
run: |
202+
inactive_stack=$(poetry run python ./scripts/get_env_config.py inactive-stack ${{ inputs.environment }})
185203
terraform -chdir=terraform/infrastructure init
186-
terraform -chdir=terraform/infrastructure workspace new ${{ inputs.environment }} || \
187-
terraform -chdir=terraform/infrastructure workspace select ${{ inputs.environment }}
204+
terraform -chdir=terraform/infrastructure workspace new ${inactive_stack} || \
205+
terraform -chdir=terraform/infrastructure workspace select ${inactive_stack}
188206
189207
- name: Terraform Apply
190208
run: terraform -chdir=terraform/infrastructure apply tfplan
209+
210+
- name: Smoke Test
211+
run: make ENV=${{ inputs.environment }} test-smoke-internal
212+
213+
activate-stack:
214+
name: Activate - ${{ inputs.environment }}
215+
needs: [terraform-apply]
216+
runs-on: [self-hosted, ci]
217+
environment: ${{ inputs.environment }}
218+
219+
steps:
220+
- name: Git clone - ${{ inputs.branch_name }}
221+
uses: actions/checkout@v4
222+
with:
223+
ref: ${{ inputs.branch_name }}
224+
225+
- name: Setup asdf cache
226+
uses: actions/cache@v4
227+
with:
228+
path: ~/.asdf
229+
key: ${{ runner.os }}-asdf-${{ hashFiles('**/.tool-versions') }}
230+
restore-keys: |
231+
${{ runner.os }}-asdf-
232+
233+
- name: Install asdf
234+
uses: asdf-vm/actions/[email protected]
235+
236+
- name: Configure Management Credentials
237+
uses: aws-actions/configure-aws-credentials@v4
238+
with:
239+
aws-region: eu-west-2
240+
role-to-assume: ${{ secrets.MGMT_ROLE_ARN }}
241+
role-session-name: github-actions-ci-${{ inputs.environment }}-${{ github.run_id}}
242+
243+
- name: Install zip
244+
run: sudo apt-get install zip
245+
246+
- name: Setup Python environment
247+
run: |
248+
poetry install --no-root
249+
source $(poetry env info --path)/bin/activate
250+
251+
- name: Activate Stack
252+
run: |
253+
inactive_stack=$(poetry run python ./scripts/get_env_config.py inactive-stack ${{ inputs.environment }})
254+
poetry run python ./scripts/activate_stack.py ${inactive_stack} ${{ inputs.environment }}
255+
256+
post-release-verify:
257+
name: Verify - ${{ inputs.environment }}
258+
needs: [activate-stack]
259+
runs-on: [self-hosted, ci]
260+
environment: ${{ inputs.environment }}
261+
262+
steps:
263+
- name: Git clone - ${{ inputs.branch_name }}
264+
uses: actions/checkout@v4
265+
with:
266+
ref: ${{ inputs.branch_name }}
267+
268+
- name: Setup asdf cache
269+
uses: actions/cache@v4
270+
with:
271+
path: ~/.asdf
272+
key: ${{ runner.os }}-asdf-${{ hashFiles('**/.tool-versions') }}
273+
restore-keys: |
274+
${{ runner.os }}-asdf-
275+
276+
- name: Install asdf
277+
uses: asdf-vm/actions/[email protected]
278+
279+
- name: Configure Management Credentials
280+
uses: aws-actions/configure-aws-credentials@v4
281+
with:
282+
aws-region: eu-west-2
283+
role-to-assume: ${{ secrets.MGMT_ROLE_ARN }}
284+
role-session-name: github-actions-ci-${{ inputs.environment }}-${{ github.run_id}}
285+
286+
- name: Install zip
287+
run: sudo apt-get install zip
288+
289+
- name: Setup Python environment
290+
run: |
291+
poetry install --no-root
292+
source $(poetry env info --path)/bin/activate
293+
294+
- name: "Smoke Test"
295+
run: |
296+
make ENV=${{ inputs.environment }} test-smoke-external
297+
298+
rollback-stack:
299+
name: Rollback - ${{ inputs.environment }}
300+
needs: [post-release-verify]
301+
if: ${{ needs.post-release-verify.result == 'failure' }}
302+
runs-on: [self-hosted, ci]
303+
environment: ${{ inputs.environment }}
304+
305+
steps:
306+
- name: Git clone - ${{ inputs.branch_name }}
307+
uses: actions/checkout@v4
308+
with:
309+
ref: ${{ inputs.branch_name }}
310+
311+
- name: Setup asdf cache
312+
uses: actions/cache@v4
313+
with:
314+
path: ~/.asdf
315+
key: ${{ runner.os }}-asdf-${{ hashFiles('**/.tool-versions') }}
316+
restore-keys: |
317+
${{ runner.os }}-asdf-
318+
319+
- name: Install asdf
320+
uses: asdf-vm/actions/[email protected]
321+
322+
- name: Configure Management Credentials
323+
uses: aws-actions/configure-aws-credentials@v4
324+
with:
325+
aws-region: eu-west-2
326+
role-to-assume: ${{ secrets.MGMT_ROLE_ARN }}
327+
role-session-name: github-actions-ci-${{ inputs.environment }}-${{ github.run_id}}
328+
329+
- name: Install zip
330+
run: sudo apt-get install zip
331+
332+
- name: Setup Python environment
333+
run: |
334+
poetry install --no-root
335+
source $(poetry env info --path)/bin/activate
336+
337+
- name: Deactivate Stack
338+
run: |
339+
inactive_stack_name=$(poetry run python ./scripts/get_env_config.py inactive-stack ${{ inputs.environment }})
340+
poetry run python ./scripts/activate_stack.py ${inactive_stack_name} ${{ inputs.environment }}

Makefile

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -36,6 +36,7 @@ asdf-install: ## Install the required tools via ASDF
3636
configure: asdf-install check-warn ## Configure this project repo, including install dependencies
3737
cp scripts/commit-msg.py .git/hooks/prepare-commit-msg && chmod ug+x .git/hooks/*
3838
poetry install
39+
poetry run pre-commit install
3940

4041
check: ## Check the build environment is setup correctly
4142
@./scripts/check-build-environment.sh
@@ -80,6 +81,16 @@ test-features-integration: check-warn ## Run the BDD feature tests in the integr
8081
@echo "Running feature tests in the integration environment"
8182
behave --define="integration_test=true" --define="env=$(TF_WORKSPACE_NAME)" $(FEATURE_TEST_ARGS)
8283

84+
test-smoke-internal: check-warn ## Run the smoke tests against the internal environment
85+
@echo "Running smoke tests against the internal environment"
86+
#ENV=$(TF_WORKSPACE_NAME) SMOKE_TEST_MODE=mtls pytest ./tests/smoke $(SMOKE_TEST_ARGS)
87+
@echo "Skipping internal smoke tests (not yet implemented)"
88+
89+
test-smoke-external: check-warn ## Run the smoke tests for the external access points
90+
@echo "Running smoke tests for the external access points"
91+
#ENV=$(ENV) SMOKE_TEST_MODE=apigee pytest ./tests/smoke $(SMOKE_TEST_ARGS)
92+
@echo "Skipping external smoke tests (not yet implemented)"
93+
8394
test-performance-prepare:
8495
mkdir -p $(DIST_PATH)
8596
poetry run python tests/performance/environment.py setup $(TF_WORKSPACE_NAME)

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -105,4 +105,4 @@ env = [
105105
"AUTH_STORE=auth-store",
106106
"TABLE_NAME=unit-test-document-pointer"
107107
]
108-
pythonpath = ["."]
108+
pythonpath = [".", "./scripts"]

scripts/activate_stack.py

Lines changed: 150 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,150 @@
1+
#!/usr/bin/env python
2+
import json
3+
import sys
4+
import traceback
5+
6+
import aws_session_assume
7+
import fire
8+
9+
CONFIG_LOCK_STATE = "lock-state"
10+
CONFIG_INACTIVE_STACK = "inactive-stack"
11+
CONFIG_ACTIVE_STACK = "active-stack"
12+
CONFIG_DOMAIN_NAME = "domain-name"
13+
14+
STATE_LOCKED = "locked"
15+
STATE_OPEN = "open"
16+
VALID_LOCK_STATES = [STATE_LOCKED, STATE_OPEN]
17+
18+
19+
def _set_lock_state(
20+
lock_state: str, environment_config: dict, parameters_key: str, sm: any
21+
):
22+
if lock_state not in VALID_LOCK_STATES:
23+
raise ValueError(f"Invalid lock state: {lock_state}")
24+
25+
print(f"Setting environment config lock state to {lock_state}....")
26+
environment_config[CONFIG_LOCK_STATE] = lock_state
27+
sm.put_secret_value(
28+
SecretId=parameters_key, SecretString=json.dumps(environment_config)
29+
)
30+
print(f"Environment config lock state is now {lock_state}")
31+
32+
33+
def _update_and_unlock(environment_config: dict, parameters_key: str, sm: any):
34+
environment_config[CONFIG_LOCK_STATE] = STATE_OPEN
35+
36+
print(f"Updating environment config to: {environment_config}")
37+
sm.put_secret_value(
38+
SecretId=parameters_key, SecretString=json.dumps(environment_config)
39+
)
40+
41+
42+
def _switch_active_stack(stack_name: str, env_domain_name: str, session: any):
43+
# Change API mappings for APIGW
44+
print(f"Gathering data about APIs for {env_domain_name} for {stack_name}....")
45+
api_gw = session.client("apigateway")
46+
env_apis = {
47+
api["name"]: api["id"]
48+
for api in api_gw.get_rest_apis(limit=100)["items"]
49+
if api["name"].startswith(f"nhsd-nrlf--{stack_name}--")
50+
}
51+
52+
if len(env_apis) != 2:
53+
raise ValueError(
54+
f"Expected 2 APIs for stack {stack_name}, got {env_apis.keys()}"
55+
)
56+
57+
api_gwv2 = session.client("apigatewayv2")
58+
existing_mappings = {
59+
mapping["ApiMappingKey"]: mapping["ApiMappingId"]
60+
for mapping in api_gwv2.get_api_mappings(DomainName=env_domain_name)["Items"]
61+
}
62+
63+
if len(existing_mappings) != 2:
64+
raise ValueError(
65+
f"Expected 2 API mappings for domain {env_domain_name}, got {len(existing_mappings)}"
66+
)
67+
if "consumer" not in existing_mappings or "producer" not in existing_mappings:
68+
raise ValueError(
69+
f"Expected API mappings for consumer and producer, got {existing_mappings.keys()}"
70+
)
71+
72+
print(f"Switching APIGW for {env_domain_name} to point to {stack_name}")
73+
api_gwv2.update_api_mapping(
74+
ApiId=env_apis[f"nhsd-nrlf--{stack_name}--consumer"],
75+
DomainName=env_domain_name,
76+
ApiMappingId=existing_mappings["consumer"],
77+
Stage="production",
78+
)
79+
api_gwv2.update_api_mapping(
80+
ApiId=env_apis[f"nhsd-nrlf--{stack_name}--producer"],
81+
DomainName=env_domain_name,
82+
ApiMappingId=existing_mappings["producer"],
83+
Stage="production",
84+
)
85+
86+
87+
def activate_stack(stack_name: str, env: str, session: any):
88+
sm = session.client("secretsmanager")
89+
90+
parameters_key = f"nhsd-nrlf--{env}--env-config"
91+
response = sm.get_secret_value(SecretId=parameters_key)
92+
93+
environment_config = json.loads(response["SecretString"])
94+
print(f"Got environment config for {env}: {environment_config}")
95+
96+
lock_state = environment_config[CONFIG_LOCK_STATE]
97+
if lock_state != "open":
98+
print(
99+
f"Unable to activate stack as lock state is not open: {lock_state}",
100+
file=sys.stderr,
101+
)
102+
return
103+
104+
current_active_stack = environment_config[CONFIG_ACTIVE_STACK]
105+
if current_active_stack == stack_name:
106+
print("Cannot activate stack, stack is already active", file=sys.stderr)
107+
return
108+
109+
_set_lock_state(
110+
STATE_LOCKED,
111+
environment_config=environment_config,
112+
parameters_key=parameters_key,
113+
sm=sm,
114+
)
115+
116+
try:
117+
domain_name = environment_config[CONFIG_DOMAIN_NAME]
118+
119+
print(f"Activating stack {stack_name} for {domain_name}....")
120+
_switch_active_stack(stack_name, env_domain_name=domain_name, session=session)
121+
except Exception as err:
122+
print(
123+
"Failed to switch active stack. Unlocking and bailing out....",
124+
file=sys.stderr,
125+
)
126+
_set_lock_state(
127+
STATE_OPEN,
128+
environment_config=environment_config,
129+
parameters_key=parameters_key,
130+
sm=sm,
131+
)
132+
print(f"Failed to activate stack: {err}", file=sys.stderr)
133+
print(f"Stack trace: {traceback.format_exc()}", file=sys.stderr)
134+
return
135+
136+
print("Updating environment config and unlocking....")
137+
environment_config[CONFIG_INACTIVE_STACK] = current_active_stack
138+
environment_config[CONFIG_ACTIVE_STACK] = stack_name
139+
_update_and_unlock(environment_config, parameters_key=parameters_key, sm=sm)
140+
141+
print(f"Complete. Stack {stack_name} is now the active stack for {env}")
142+
143+
144+
def main(stack_name: str, env: str):
145+
boto_session = aws_session_assume.get_boto_session(env)
146+
activate_stack(stack_name, env=env, session=boto_session)
147+
148+
149+
if __name__ == "__main__":
150+
fire.Fire(main)

0 commit comments

Comments
 (0)