-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy path: canary.tf
More file actions
71 lines (65 loc) · 2.68 KB
/
canary.tf
File metadata and controls
71 lines (65 loc) · 2.68 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
################################################################################
# CloudWatch Synthetics Canary (periodic health check)
################################################################################
# Make a separate S3 bucket for the canary so that things like retention
# policies aren't muddled
# Dedicated bucket holding both the canary's packaged code and its run artifacts.
resource "aws_s3_bucket" "ami_connect_canaries_s3_bucket_name" {
bucket = var.ami_connect_canaries_s3_bucket_name
# force_destroy lets `terraform destroy` remove the bucket even while it still
# contains objects; acceptable here because canary artifacts are disposable.
force_destroy = true
}
locals {
# We will use the JS file's md5 to trigger a deploy to canary/lambda when the script changes
# The hash is interpolated into the zip path and the S3 object key below, so
# any change to the script produces a new artifact name and forces an update.
canary_code_hash = filemd5("${path.module}/canary/airflow_canary.js")
}
# Zip the JS file to prep for upload to S3
# Embedding the content hash in output_path means a changed script yields a
# distinctly named build artifact instead of overwriting the previous one.
data "archive_file" "airflow_canary_zip" {
type = "zip"
source_file = "${path.module}/canary/airflow_canary.js"
output_path = "${path.module}/build/airflow_canary-${local.canary_code_hash}.zip"
}
# Upload the zipped JS file to S3 so canary/lambda can use it.
# Reference the bucket RESOURCE (not the input variable) so Terraform records
# the dependency and creates the bucket before attempting this upload; with
# the bare variable there is no edge in the dependency graph and a fresh
# apply can fail with NoSuchBucket.
resource "aws_s3_object" "airflow_canary_code" {
  bucket = aws_s3_bucket.ami_connect_canaries_s3_bucket_name.id
  key    = "canaries/airflow_canary-${local.canary_code_hash}.zip"
  source = data.archive_file.airflow_canary_zip.output_path
  # archive_file already computes the zip's md5 (output_md5); use it directly
  # instead of re-hashing the file on disk with filemd5().
  etag = data.archive_file.airflow_canary_zip.output_md5
}
# Create the canary, which runs a lambda that calls the JS file
resource "aws_synthetics_canary" "airflow_healthcheck" {
name = "airflow-site-healthcheck"
# Run artifacts (screenshots, logs, reports) are written under this S3 prefix.
artifact_s3_location = "s3://${var.ami_connect_canaries_s3_bucket_name}/canaries/"
# NOTE(review): reuses the pipeline IAM role — presumably it already grants
# the Synthetics/S3/CloudWatch permissions a canary needs; verify.
execution_role_arn = aws_iam_role.ami_connect_pipeline.arn
# Must match "<script filename>.<exported function>" inside the uploaded zip.
handler = "airflow_canary.handler"
runtime_version = "syn-nodejs-puppeteer-12.0"
# Run the health check every 5 minutes.
schedule {
expression = "rate(5 minutes)"
}
# Canary code location. Referencing the S3 object's key/version_id means a
# new upload (new content hash) is seen as a change and redeploys the canary.
s3_bucket = var.ami_connect_canaries_s3_bucket_name
s3_key = aws_s3_object.airflow_canary_code.key
s3_version = aws_s3_object.airflow_canary_code.version_id
run_config {
# Each individual run is cut off after 60 seconds.
timeout_in_seconds = 60
environment_variables = {
# Target host the canary script checks; consumed by airflow_canary.js.
AIRFLOW_HOSTNAME = var.airflow_hostname
}
}
# Days to retain artifacts from successful / failed runs respectively.
success_retention_period = 30
failure_retention_period = 30
# Begin running on the schedule immediately after creation.
start_canary = true
}
# Alarm when canary check fails
resource "aws_cloudwatch_metric_alarm" "airflow_down_alarm" {
  alarm_name          = "ami-connect-airflow-site-down"
  # Fire when average SuccessPercent drops below 90 for 2 consecutive
  # 5-minute periods (matching the canary's rate(5 minutes) schedule).
  comparison_operator = "LessThanThreshold"
  evaluation_periods  = 2
  metric_name         = "SuccessPercent"
  namespace           = "CloudWatchSynthetics"
  period              = 300
  statistic           = "Average"
  threshold           = 90
  alarm_description   = "Alert when AMI Connect Airflow site is down"
  alarm_actions       = [aws_sns_topic.ami_connect_airflow_alerts.arn]
  # If the canary stops reporting entirely (stopped, broken role, deleted),
  # no SuccessPercent datapoints arrive at all. The default treatment
  # ("missing") would leave this alarm stuck in OK/INSUFFICIENT_DATA and
  # silently mask an outage, so treat missing data as breaching.
  treat_missing_data = "breaching"
  dimensions = {
    CanaryName = aws_synthetics_canary.airflow_healthcheck.name
  }
}