Skip to content

Commit f1a56e1

Browse files
authored
enh: use flags to enable/disable additional functionality instead of using examples folder (#176)
1 parent 3f6b86c commit f1a56e1

38 files changed

+1194
-900
lines changed

README.md

Lines changed: 86 additions & 91 deletions
Large diffs are not rendered by default.

terraform/layer1-aws/README.md

Lines changed: 98 additions & 67 deletions
Large diffs are not rendered by default.

terraform/layer1-aws/examples/aws-ec2-pritunl.tf renamed to terraform/layer1-aws/aws-ec2-pritunl.tf

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,8 @@
1+
#tfsec:ignore:aws-vpc-no-public-egress-sgr tfsec:ignore:aws-vpc-no-public-ingress-sgr
12
module "pritunl" {
2-
source = "../modules/aws-ec2-pritunl"
3+
count = var.pritunl_vpn_server_enable ? 1 : 0
34

5+
source = "../modules/aws-ec2-pritunl"
46
environment = local.env
57
vpc_id = module.vpc.vpc_id
68
public_subnets = module.vpc.public_subnets

terraform/layer1-aws/variables.tf

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -245,3 +245,9 @@ variable "eks_cluster_encryption_config_enable" {
245245
default = false
246246
description = "Enable or not encryption for k8s secrets with aws-kms"
247247
}
248+
249+
variable "pritunl_vpn_server_enable" {
250+
type = bool
251+
default = false
252+
description = "Indicates whether or not the Pritunl VPN server is deployed."
253+
}

terraform/layer2-k8s/.terraform.lock.hcl

Lines changed: 18 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

terraform/layer2-k8s/README.md

Lines changed: 169 additions & 32 deletions
Large diffs are not rendered by default.

terraform/layer2-k8s/eks-aws-loadbalancer-controller.tf

Lines changed: 20 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,15 @@
11
locals {
2-
aws-load-balancer-controller = {
3-
chart = local.helm_charts[index(local.helm_charts.*.id, "aws-load-balancer-controller")].chart
4-
repository = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-load-balancer-controller")], "repository", null)
5-
chart_version = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-load-balancer-controller")], "version", null)
2+
aws_load_balancer_controller = {
3+
name = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].id
4+
enabled = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].enabled
5+
chart = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].chart
6+
repository = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].repository
7+
chart_version = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].version
8+
namespace = local.helm_releases[index(local.helm_releases.*.id, "aws-load-balancer-controller")].namespace
69
}
710
alb_ingress_controller = templatefile("${path.module}/templates/alb-ingress-controller-values.yaml",
811
{
9-
role_arn = var.aws_loadbalancer_controller_enable ? module.aws_iam_aws_loadbalancer_controller[0].role_arn : "",
12+
role_arn = local.aws_load_balancer_controller.enabled ? module.aws_iam_aws_loadbalancer_controller[0].role_arn : "",
1013
region = local.region,
1114
cluster_name = local.eks_cluster_id,
1215
vpc_id = local.vpc_id
@@ -15,10 +18,10 @@ locals {
1518

1619
#tfsec:ignore:kubernetes-network-no-public-egress tfsec:ignore:kubernetes-network-no-public-ingress
1720
module "aws_load_balancer_controller_namespace" {
18-
count = var.aws_loadbalancer_controller_enable ? 1 : 0
21+
count = local.aws_load_balancer_controller.enabled ? 1 : 0
1922

2023
source = "../modules/kubernetes-namespace"
21-
name = "aws-load-balancer-controller"
24+
name = local.aws_load_balancer_controller.namespace
2225
network_policies = [
2326
{
2427
name = "default-deny"
@@ -34,7 +37,7 @@ module "aws_load_balancer_controller_namespace" {
3437
{
3538
namespace_selector = {
3639
match_labels = {
37-
name = "aws-load-balancer-controller"
40+
name = local.aws_load_balancer_controller.namespace
3841
}
3942
}
4043
}
@@ -48,7 +51,7 @@ module "aws_load_balancer_controller_namespace" {
4851
match_expressions = {
4952
key = "app.kubernetes.io/name"
5053
operator = "In"
51-
values = ["aws-load-balancer-controller"]
54+
values = [local.aws_load_balancer_controller.name]
5255
}
5356
}
5457
ingress = {
@@ -89,10 +92,10 @@ module "aws_load_balancer_controller_namespace" {
8992

9093
#tfsec:ignore:aws-iam-no-policy-wildcards
9194
module "aws_iam_aws_loadbalancer_controller" {
92-
count = var.aws_loadbalancer_controller_enable ? 1 : 0
95+
count = local.aws_load_balancer_controller.enabled ? 1 : 0
9396

9497
source = "../modules/aws-iam-eks-trusted"
95-
name = "${local.name}-alb-ingress"
98+
name = "${local.name}-aws-lb-controller"
9699
region = local.region
97100
oidc_provider_arn = local.eks_oidc_provider_arn
98101
policy = jsonencode({
@@ -305,16 +308,17 @@ module "aws_iam_aws_loadbalancer_controller" {
305308
}
306309

307310
resource "helm_release" "aws_loadbalancer_controller" {
308-
count = var.aws_loadbalancer_controller_enable ? 1 : 0
311+
count = local.aws_load_balancer_controller.enabled ? 1 : 0
309312

310-
name = "aws-load-balancer-controller"
311-
chart = local.aws-load-balancer-controller.chart
312-
repository = local.aws-load-balancer-controller.repository
313-
version = local.aws-load-balancer-controller.chart_version
313+
name = local.aws_load_balancer_controller.name
314+
chart = local.aws_load_balancer_controller.chart
315+
repository = local.aws_load_balancer_controller.repository
316+
version = local.aws_load_balancer_controller.chart_version
314317
namespace = module.aws_load_balancer_controller_namespace[count.index].name
315318
max_history = var.helm_release_history_size
316319

317320
values = [
318321
local.alb_ingress_controller
319322
]
323+
320324
}

terraform/layer2-k8s/eks-aws-node-termination-handler.tf

Lines changed: 18 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,20 @@
11
locals {
2-
aws-node-termination-handler = {
3-
chart = local.helm_charts[index(local.helm_charts.*.id, "aws-node-termination-handler")].chart
4-
repository = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-node-termination-handler")], "repository", null)
5-
chart_version = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-node-termination-handler")], "version", null)
2+
aws_node_termination_handler = {
3+
name = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].id
4+
enabled = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].enabled
5+
chart = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].chart
6+
repository = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].repository
7+
chart_version = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].version
8+
namespace = local.helm_releases[index(local.helm_releases.*.id, "aws-node-termination-handler")].namespace
69
}
710
}
811

912
#tfsec:ignore:kubernetes-network-no-public-egress tfsec:ignore:kubernetes-network-no-public-ingress
1013
module "aws_node_termination_handler_namespace" {
14+
count = local.aws_node_termination_handler.enabled ? 1 : 0
15+
1116
source = "../modules/kubernetes-namespace"
12-
name = "aws-node-termination-handler"
17+
name = local.aws_node_termination_handler.namespace
1318
network_policies = [
1419
{
1520
name = "default-deny"
@@ -25,7 +30,7 @@ module "aws_node_termination_handler_namespace" {
2530
{
2631
namespace_selector = {
2732
match_labels = {
28-
name = "aws-node-termination-handler"
33+
name = local.aws_node_termination_handler.namespace
2934
}
3035
}
3136
}
@@ -53,12 +58,13 @@ module "aws_node_termination_handler_namespace" {
5358
}
5459

5560
resource "helm_release" "aws_node_termination_handler" {
56-
name = "aws-node-termination-handler"
57-
chart = local.aws-node-termination-handler.chart
58-
repository = local.aws-node-termination-handler.repository
59-
version = local.aws-node-termination-handler.chart_version
60-
namespace = module.aws_node_termination_handler_namespace.name
61-
wait = false
61+
count = local.aws_node_termination_handler.enabled ? 1 : 0
62+
63+
name = local.aws_node_termination_handler.name
64+
chart = local.aws_node_termination_handler.chart
65+
repository = local.aws_node_termination_handler.repository
66+
version = local.aws_node_termination_handler.chart_version
67+
namespace = module.aws_node_termination_handler_namespace[count.index].name
6268
max_history = var.helm_release_history_size
6369

6470
values = [

terraform/layer2-k8s/eks-calico.tf

Lines changed: 16 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -1,25 +1,26 @@
11
locals {
2-
aws-calico = {
3-
chart = local.helm_charts[index(local.helm_charts.*.id, "aws-calico")].chart
4-
repository = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-calico")], "repository", null)
5-
chart_version = lookup(local.helm_charts[index(local.helm_charts.*.id, "aws-calico")], "version", null)
2+
aws_calico = {
3+
name = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].id
4+
enabled = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].enabled
5+
chart = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].chart
6+
repository = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].repository
7+
chart_version = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].version
8+
namespace = local.helm_releases[index(local.helm_releases.*.id, "aws-calico")].namespace
69
}
710
}
811

9-
data "template_file" "calico_daemonset" {
10-
template = file("${path.module}/templates/calico-values.yaml")
11-
}
12-
1312
resource "helm_release" "calico_daemonset" {
14-
name = "aws-calico"
15-
chart = local.aws-calico.chart
16-
repository = local.aws-calico.repository
17-
version = local.aws-calico.chart_version
18-
namespace = "kube-system"
13+
count = local.aws_calico.enabled ? 1 : 0
14+
15+
name = local.aws_calico.name
16+
chart = local.aws_calico.chart
17+
repository = local.aws_calico.repository
18+
version = local.aws_calico.chart_version
19+
namespace = local.aws_calico.namespace
1920
max_history = var.helm_release_history_size
20-
wait = false
2121

2222
values = [
23-
data.template_file.calico_daemonset.rendered,
23+
file("${path.module}/templates/calico-values.yaml")
2424
]
25+
2526
}

terraform/layer2-k8s/eks-cert-manager-certificate.tf

Lines changed: 0 additions & 33 deletions
This file was deleted.

0 commit comments

Comments (0)