Skip to content

Commit e5928c1

Browse files
authored
Merge branch 'master' into VED-971-Mesh-Processor-Alerting
2 parents 67da503 + 857e32b commit e5928c1

File tree

14 files changed

+109
-37
lines changed

14 files changed

+109
-37
lines changed

.github/workflows/create-release-tag.yml

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -29,9 +29,7 @@ jobs:
2929
run: pip install semver
3030

3131
- name: Set SPEC_VERSION env var
32-
run: echo ::set-env name=SPEC_VERSION::$(python utilities/scripts/calculate_version.py)
33-
env:
34-
ACTIONS_ALLOW_UNSECURE_COMMANDS: true
32+
run: echo "SPEC_VERSION=$(python utilities/scripts/calculate_version.py)" >> $GITHUB_ENV
3533

3634
- name: Create release (master only)
3735
id: create-release

.github/workflows/deploy-backend.yml

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -48,14 +48,13 @@ env: # Sonarcloud - do not allow direct usage of untrusted data
4848
ENVIRONMENT: ${{ inputs.environment }}
4949
SUB_ENVIRONMENT: ${{ inputs.sub_environment }}
5050

51-
permissions:
52-
id-token: write
53-
contents: read
54-
5551
run-name: Deploy Backend - ${{ inputs.environment }} ${{ inputs.sub_environment }}
5652

5753
jobs:
5854
terraform-plan:
55+
permissions:
56+
id-token: write
57+
contents: read
5958
runs-on: ubuntu-latest
6059
environment:
6160
name: ${{ inputs.environment }}
@@ -89,6 +88,9 @@ jobs:
8988
path: infrastructure/instance/tfplan
9089

9190
terraform-apply:
91+
permissions:
92+
id-token: write
93+
contents: read
9294
needs: terraform-plan
9395
runs-on: ubuntu-latest
9496
environment:

.github/workflows/run-e2e-tests.yml

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -54,12 +54,10 @@ env:
5454
STATUS_API_KEY: ${{ secrets.STATUS_API_KEY }}
5555
SOURCE_COMMIT_ID: ${{ github.sha }}
5656

57-
permissions:
58-
id-token: write
59-
contents: read
60-
6157
jobs:
6258
wait-for-deployment:
59+
permissions:
60+
contents: read
6361
runs-on: ubuntu-latest
6462
environment: ${{ inputs.apigee_environment }}
6563
outputs:
@@ -109,6 +107,9 @@ jobs:
109107
fi
110108
111109
e2e-tests:
110+
permissions:
111+
id-token: write
112+
contents: read
112113
runs-on: ubuntu-latest
113114
needs: [wait-for-deployment]
114115
environment: ${{ inputs.apigee_environment }}
@@ -202,6 +203,9 @@ jobs:
202203
run: poetry run python -m unittest
203204

204205
batch-e2e-tests:
206+
permissions:
207+
id-token: write
208+
contents: read
205209
needs: [wait-for-deployment, e2e-tests]
206210
# Only actually depend on wait-for-deployment, but run after e2e-tests
207211
if: ${{ !cancelled() && needs.wait-for-deployment.result == 'success' && needs.wait-for-deployment.outputs.RUN_BATCH_E2E_TESTS == 'true' }}

Makefile

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,9 @@ publish: clean
3232
cp build/immunisation-fhir-api.json sandbox/
3333
cp -r specification sandbox/specification
3434

35+
serve: publish
36+
npm run serve
37+
3538
#Creates a minified OAS spec in JSON for sending to APIM
3639
oas: publish
3740
mkdir -p oas

README.specification.md

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,12 +55,13 @@ There are `make` commands that alias some of this functionality:
5555

5656
- `lint` -- Lints the spec and code
5757
- `publish` -- Outputs the specification as a **single** JSON file into the `build/` directory
58+
- `serve` -- Serves a preview of the API document locally
5859

5960
### Modifying the OAS file
6061

6162
Note that the master OAS file is now the **YAML** version, as it is far easier to maintain than the JSON.
6263

63-
To review your modifications, use Swagger Editor (https://editor.swagger.io).
64+
To review your modifications, use Swagger Editor (https://editor.swagger.io). Or, alternatively, use the `make serve` command.
6465

6566
### Update the OAS file to the public website
6667

azure/templates/build.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ steps:
44
npm run publish 2> /dev/null
55
cp build/immunisation-fhir-api.json sandbox/
66
7-
cd sandbox
7+
cd sandbox
88
docker build -t sandbox .
99
displayName: Build sandbox image
1010
workingDirectory: "$(Pipeline.Workspace)/s/$(SERVICE_NAME)"

infrastructure/instance/id_sync_lambda.tf

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -309,6 +309,38 @@ resource "aws_cloudwatch_log_group" "id_sync_log_group" {
309309
retention_in_days = 30
310310
}
311311

312+
resource "aws_cloudwatch_log_metric_filter" "id_sync_error_logs" {
313+
count = var.error_alarm_notifications_enabled ? 1 : 0
314+
315+
name = "${local.short_prefix}-IdSyncErrorLogsFilter"
316+
pattern = "%\\[ERROR\\]%"
317+
log_group_name = aws_cloudwatch_log_group.id_sync_log_group.name
318+
319+
metric_transformation {
320+
name = "${local.short_prefix}-IdSyncErrorLogs"
321+
namespace = "${local.short_prefix}-IdSyncLambda"
322+
value = "1"
323+
}
324+
}
325+
326+
resource "aws_cloudwatch_metric_alarm" "id_sync_error_alarm" {
327+
count = var.error_alarm_notifications_enabled ? 1 : 0
328+
329+
alarm_name = "${local.short_prefix}-id-sync-lambda-error"
330+
comparison_operator = "GreaterThanOrEqualToThreshold"
331+
evaluation_periods = 1
332+
metric_name = "${local.short_prefix}-IdSyncErrorLogs"
333+
namespace = "${local.short_prefix}-IdSyncLambda"
334+
period = 120
335+
statistic = "Sum"
336+
threshold = 1
337+
alarm_description = "This sets off an alarm for any error logs found in the id sync (nhs number change) Lambda function"
338+
alarm_actions = [data.aws_sns_topic.imms_system_alert_errors.arn]
339+
treat_missing_data = "notBreaching"
340+
}
341+
342+
343+
312344
# delete config_lambda_notification / new_s3_invoke_permission - not required; duplicate
313345

314346
# NEW

infrastructure/instance/redis_sync_lambda.tf

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,36 @@ resource "aws_cloudwatch_log_group" "redis_sync_log_group" {
253253
retention_in_days = 30
254254
}
255255

256+
resource "aws_cloudwatch_log_metric_filter" "redis_sync_error_logs" {
257+
count = var.error_alarm_notifications_enabled ? 1 : 0
258+
259+
name = "${local.short_prefix}-RedisSyncErrorLogsFilter"
260+
pattern = "%\\[ERROR\\]%"
261+
log_group_name = aws_cloudwatch_log_group.redis_sync_log_group.name
262+
263+
metric_transformation {
264+
name = "${local.short_prefix}-RedisSyncErrorLogs"
265+
namespace = "${local.short_prefix}-RedisSyncLambda"
266+
value = "1"
267+
}
268+
}
269+
270+
resource "aws_cloudwatch_metric_alarm" "redis_sync_error_alarm" {
271+
count = var.error_alarm_notifications_enabled ? 1 : 0
272+
273+
alarm_name          = "${local.short_prefix}-redis-sync-lambda-error"
274+
comparison_operator = "GreaterThanOrEqualToThreshold"
275+
evaluation_periods = 1
276+
metric_name = "${local.short_prefix}-RedisSyncErrorLogs"
277+
namespace = "${local.short_prefix}-RedisSyncLambda"
278+
period = 120
279+
statistic = "Sum"
280+
threshold = 1
281+
alarm_description = "This sets off an alarm for any error logs found in the redis sync Lambda function"
282+
alarm_actions = [data.aws_sns_topic.imms_system_alert_errors.arn]
283+
treat_missing_data = "notBreaching"
284+
}
285+
256286
# S3 Bucket notification to trigger Lambda function for config bucket
257287
resource "aws_s3_bucket_notification" "config_lambda_notification" {
258288

infrastructure/proxies/sandbox/apiproxy/policies/AssignMessage.AddCors.xml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -4,8 +4,8 @@
44
<Set>
55
<Headers>
66
<Header name="Access-Control-Allow-Origin">{request.header.origin}</Header>
7-
<Header name="Access-Control-Allow-Headers">origin, x-requested-with, accept, content-type, nhsd-session-urid, X-Request-Id, X-Correlation-Id, Location</Header>
8-
<Header name="Access-Control-Expose-Headers">origin, x-requested-with, accept, content-type, nhsd-session-urid, X-Request-Id, X-Correlation-Id, Location</Header>
7+
<Header name="Access-Control-Allow-Headers">origin, x-requested-with, accept, content-type, ETag, nhsd-session-urid, X-Request-Id, X-Correlation-Id, Location</Header>
8+
<Header name="Access-Control-Expose-Headers">origin, x-requested-with, accept, content-type, ETag, nhsd-session-urid, X-Request-Id, X-Correlation-Id, Location</Header>
99
<Header name="Access-Control-Max-Age">3628800</Header>
1010
<Header name="Access-Control-Allow-Methods">GET, PUT, POST, DELETE</Header>
1111
</Headers>

lambdas/backend/src/controller/parameter_parser.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -166,7 +166,7 @@ def parse_search_params(search_params_in_req: dict[str, list[str]]) -> dict[str,
166166
"""Ensures the search params provided in the event do not contain duplicated keys. Will split the parameters
167167
provided by comma separators. Raises a ParameterExceptionError for duplicated keys. Existing business logic stipulated
168168
that the API only accepts comma separated values rather than multi-value."""
169-
if any([len(values) > 1 for _, values in search_params_in_req.items()]):
169+
if any(len(values) > 1 for _, values in search_params_in_req.items()):
170170
raise ParameterExceptionError(DUPLICATED_PARAMETERS_ERROR_MESSAGE)
171171

172172
parsed_params = {}

0 commit comments

Comments
 (0)