
Commit 89c429f

Enable terraform plan access via dynamic Terraform roles (cloudposse/terraform-aws-components#715)
1 parent 2135f41 commit 89c429f

File tree (3 files changed: +23 −40 lines)

- src/README.md
- src/provider-helm.tf
- src/providers.tf

src/README.md

Lines changed: 0 additions & 4 deletions
@@ -95,9 +95,7 @@ components:
 
 | Name | Type |
 |------|------|
-| [aws_eks_cluster.kubernetes](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/eks_cluster) | data source |
 | [aws_eks_cluster_auth.eks](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/eks_cluster_auth) | data source |
-| [aws_eks_cluster_auth.kubernetes](https://registry.terraform.io/providers/hashicorp/aws/latest/docs/data-sources/eks_cluster_auth) | data source |
 
 ## Inputs
 
@@ -121,8 +119,6 @@ components:
 | <a name="input_helm_manifest_experiment_enabled"></a> [helm\_manifest\_experiment\_enabled](#input\_helm\_manifest\_experiment\_enabled) | Enable storing of the rendered manifest for helm\_release so the full diff of what is changing can been seen in the plan | `bool` | `false` | no |
 | <a name="input_hostname_template"></a> [hostname\_template](#input\_hostname\_template) | The `format()` string to use to generate the hostname via `format(var.hostname_template, var.tenant, var.stage, var.environment)`"<br>Typically something like `"echo.%[3]v.%[2]v.example.com"`. | `string` | n/a | yes |
 | <a name="input_id_length_limit"></a> [id\_length\_limit](#input\_id\_length\_limit) | Limit `id` to this many characters (minimum 6).<br>Set to `0` for unlimited length.<br>Set to `null` for keep the existing setting, which defaults to `0`.<br>Does not affect `id_full`. | `number` | `null` | no |
-| <a name="input_import_profile_name"></a> [import\_profile\_name](#input\_import\_profile\_name) | AWS Profile name to use when importing a resource | `string` | `null` | no |
-| <a name="input_import_role_arn"></a> [import\_role\_arn](#input\_import\_role\_arn) | IAM Role ARN to use when importing a resource | `string` | `null` | no |
 | <a name="input_ingress_type"></a> [ingress\_type](#input\_ingress\_type) | Set to 'nginx' to create an ingress resource relying on an NGiNX backend for the echo-server service. Set to 'alb' to create an ingress resource relying on an AWS ALB backend for the echo-server service. Leave blank to not create any ingress for the echo-server service. | `string` | `null` | no |
 | <a name="input_kube_data_auth_enabled"></a> [kube\_data\_auth\_enabled](#input\_kube\_data\_auth\_enabled) | If `true`, use an `aws_eks_cluster_auth` data source to authenticate to the EKS cluster.<br>Disabled by `kubeconfig_file_enabled` or `kube_exec_auth_enabled`. | `bool` | `false` | no |
 | <a name="input_kube_exec_auth_aws_profile"></a> [kube\_exec\_auth\_aws\_profile](#input\_kube\_exec\_auth\_aws\_profile) | The AWS config profile for `aws eks get-token` to use | `string` | `""` | no |

src/provider-helm.tf

Lines changed: 17 additions & 9 deletions
@@ -2,6 +2,12 @@
 #
 # This file is a drop-in to provide a helm provider.
 #
+# It depends on 2 standard Cloud Posse data source modules to be already
+# defined in the same component:
+#
+# 1. module.iam_roles to provide the AWS profile or Role ARN to use to access the cluster
+# 2. module.eks to provide the EKS cluster information
+#
 # All the following variables are just about configuring the Kubernetes provider
 # to be able to modify EKS cluster. The reason there are so many options is
 # because at various times, each one of them has had problems, so we give you a choice.
@@ -95,14 +101,16 @@ locals {
     "--profile", var.kube_exec_auth_aws_profile
   ] : []
 
-  kube_exec_auth_role_arn = coalesce(var.kube_exec_auth_role_arn, var.import_role_arn, module.iam_roles.terraform_role_arn)
+  kube_exec_auth_role_arn = coalesce(var.kube_exec_auth_role_arn, module.iam_roles.terraform_role_arn)
   exec_role = local.kube_exec_auth_enabled && var.kube_exec_auth_role_arn_enabled ? [
     "--role-arn", local.kube_exec_auth_role_arn
   ] : []
 
-  certificate_authority_data = module.eks.outputs.eks_cluster_certificate_authority_data
-  eks_cluster_id             = module.eks.outputs.eks_cluster_id
-  eks_cluster_endpoint       = module.eks.outputs.eks_cluster_endpoint
+  # Provide dummy configuration for the case where the EKS cluster is not available.
+  certificate_authority_data = try(module.eks.outputs.eks_cluster_certificate_authority_data, "")
+  # Use coalesce+try to handle both the case where the output is missing and the case where it is empty.
+  eks_cluster_id       = coalesce(try(module.eks.outputs.eks_cluster_id, ""), "missing")
+  eks_cluster_endpoint = try(module.eks.outputs.eks_cluster_endpoint, "")
 }
 
 data "aws_eks_cluster_auth" "eks" {
@@ -114,14 +122,14 @@ provider "helm" {
   kubernetes {
     host                   = local.eks_cluster_endpoint
     cluster_ca_certificate = base64decode(local.certificate_authority_data)
-    token                  = local.kube_data_auth_enabled ? data.aws_eks_cluster_auth.eks[0].token : null
+    token                  = local.kube_data_auth_enabled ? one(data.aws_eks_cluster_auth.eks[*].token) : null
     # The Kubernetes provider will use information from KUBECONFIG if it exists, but if the default cluster
     # in KUBECONFIG is some other cluster, this will cause problems, so we override it always.
     config_path    = local.kubeconfig_file_enabled ? var.kubeconfig_file : ""
     config_context = var.kubeconfig_context
 
     dynamic "exec" {
-      for_each = local.kube_exec_auth_enabled ? ["exec"] : []
+      for_each = local.kube_exec_auth_enabled && length(local.certificate_authority_data) > 0 ? ["exec"] : []
       content {
         api_version = local.kubeconfig_exec_auth_api_version
         command     = "aws"
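Note on the token change above: indexing with `[0]` can fail at plan time when the `aws_eks_cluster_auth` data source has `count = 0` (Terraform may evaluate both branches of the conditional), whereas a splat over an empty resource yields an empty list and `one()` of an empty list returns `null`. A minimal sketch of that behavior with an unrelated counted resource (`null_resource` is used purely for illustration):

variable "auth_enabled" {
  type    = bool
  default = false
}

resource "null_resource" "example" {
  count = var.auth_enabled ? 1 : 0
}

output "maybe_id" {
  # With count = 0 the splat expression produces [], and one([]) returns null,
  # avoiding the invalid-index error that null_resource.example[0].id would raise.
  value = one(null_resource.example[*].id)
}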
@@ -132,21 +140,21 @@ provider "helm" {
     }
   }
   experiments {
-    manifest = var.helm_manifest_experiment_enabled
+    manifest = var.helm_manifest_experiment_enabled && module.this.enabled
   }
 }
 
 provider "kubernetes" {
   host                   = local.eks_cluster_endpoint
   cluster_ca_certificate = base64decode(local.certificate_authority_data)
-  token                  = local.kube_data_auth_enabled ? data.aws_eks_cluster_auth.eks[0].token : null
+  token                  = local.kube_data_auth_enabled ? one(data.aws_eks_cluster_auth.eks[*].token) : null
   # The Kubernetes provider will use information from KUBECONFIG if it exists, but if the default cluster
   # in KUBECONFIG is some other cluster, this will cause problems, so we override it always.
   config_path    = local.kubeconfig_file_enabled ? var.kubeconfig_file : ""
   config_context = var.kubeconfig_context
 
   dynamic "exec" {
-    for_each = local.kube_exec_auth_enabled ? ["exec"] : []
+    for_each = local.kube_exec_auth_enabled && length(local.certificate_authority_data) > 0 ? ["exec"] : []
     content {
       api_version = local.kubeconfig_exec_auth_api_version
       command     = "aws"
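Note on the `dynamic "exec"` guard in both provider blocks: when the `for_each` expression evaluates to an empty collection, Terraform omits the nested block entirely, so exec-based authentication (the call out to `aws eks get-token`) is skipped whenever no cluster certificate data is available. A minimal sketch of the idiom, with hypothetical variables standing in for the component's locals:

variable "exec_auth_enabled" {
  type    = bool
  default = true
}

variable "certificate_authority_data" {
  type    = string
  default = "" # empty when the cluster is not available
}

provider "kubernetes" {
  host = "https://example.invalid" # placeholder endpoint, illustration only

  # An empty for_each collection means no exec block is generated at all.
  dynamic "exec" {
    for_each = var.exec_auth_enabled && length(var.certificate_authority_data) > 0 ? ["exec"] : []
    content {
      api_version = "client.authentication.k8s.io/v1beta1"
      command     = "aws"
      args        = ["eks", "get-token", "--cluster-name", "example-cluster"]
    }
  }
}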

src/providers.tf

Lines changed: 6 additions & 27 deletions
@@ -1,11 +1,14 @@
 provider "aws" {
   region = var.region
 
-  profile = module.iam_roles.profiles_enabled ? coalesce(var.import_profile_name, module.iam_roles.terraform_profile_name) : null
+  # Profile is deprecated in favor of terraform_role_arn. When profiles are not in use, terraform_profile_name is null.
+  profile = module.iam_roles.terraform_profile_name
+
   dynamic "assume_role" {
-    for_each = module.iam_roles.profiles_enabled ? [] : ["role"]
+    # module.iam_roles.terraform_role_arn may be null, in which case do not assume a role.
+    for_each = compact([module.iam_roles.terraform_role_arn])
     content {
-      role_arn = coalesce(var.import_role_arn, module.iam_roles.terraform_role_arn)
+      role_arn = module.iam_roles.terraform_role_arn
     }
   }
 }
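Note on the `assume_role` change above: `compact()` drops null and empty-string elements, so the `for_each` collection is empty (and no `assume_role` block is generated) unless `module.iam_roles.terraform_role_arn` actually carries a role ARN; this replaces the old `profiles_enabled` boolean switch. A minimal sketch of the idiom, with a hypothetical `role_arn` variable standing in for the module output:

variable "role_arn" {
  type    = string
  default = null # no role to assume
}

provider "aws" {
  region = "us-east-1" # example region, illustration only

  dynamic "assume_role" {
    # compact() removes null/empty entries, leaving an empty collection
    # (and therefore no assume_role block) when no role ARN was supplied.
    for_each = compact([var.role_arn])
    content {
      role_arn = assume_role.value
    }
  }
}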
@@ -14,27 +17,3 @@ module "iam_roles" {
   source  = "../../account-map/modules/iam-roles"
   context = module.this.context
 }
-
-variable "import_profile_name" {
-  type        = string
-  default     = null
-  description = "AWS Profile name to use when importing a resource"
-}
-
-variable "import_role_arn" {
-  type        = string
-  default     = null
-  description = "IAM Role ARN to use when importing a resource"
-}
-
-data "aws_eks_cluster" "kubernetes" {
-  count = local.enabled ? 1 : 0
-
-  name = module.eks.outputs.eks_cluster_id
-}
-
-data "aws_eks_cluster_auth" "kubernetes" {
-  count = local.enabled ? 1 : 0
-
-  name = module.eks.outputs.eks_cluster_id
-}
