-
Notifications
You must be signed in to change notification settings - Fork 112
Open
Description
Hi, I created a cluster with RKE and registered it with Rancher like this:
rancher.tf
# Applies each document of the Rancher registration manifest to the cluster
# (one kubectl_manifest instance per YAML document from the split data source).
# NOTE(review): the label "metrics_server" is misleading — this manages the
# Rancher agent manifests — but renaming it would move every instance in
# Terraform state, so it is left unchanged here.
resource "kubectl_manifest" "metrics_server" {
for_each = var.enable_eks_rancher_import ? data.kubectl_file_documents.rancher_yaml_doc[0].manifests : {}
yaml_body = each.value
lifecycle {
# Guard against accidental deletion of the in-cluster Rancher agent objects.
prevent_destroy = true
}
}
# Persists the downloaded Rancher agent registration manifest to disk for
# inspection/debugging. Only created when the EKS -> Rancher import is enabled.
resource "local_file" "rancher-agent-registration" {
count = var.enable_eks_rancher_import ? 1 : 0
# Use a stable filename. The previous "${timestamp()}-rancher.yml" name
# evaluated to a new value on every plan, which made `filename` differ each
# run and forced the resource to be destroyed and recreated on every apply
# (the plan output shows `filename ... # forces replacement`).
filename = "${path.module}/rancher.yml"
content = data.http.rancher_yaml_data[count.index].body
}
data.tf
# Fetches the Rancher-generated cluster registration manifest over HTTP(S).
data "http" "rancher_yaml_data" {
count = var.enable_eks_rancher_import ? 1 : 0
url = var.import_eks_rancher_yaml_url
# NOTE(review): insecure = true disables TLS certificate verification for
# this request — acceptable for self-signed test endpoints only; confirm
# this is intentional before pointing at a production Rancher server.
insecure = true
}
# Splits the downloaded multi-document YAML into individual manifests,
# consumed by kubectl_manifest.metrics_server via for_each.
data "kubectl_file_documents" "rancher_yaml_doc" {
count = var.enable_eks_rancher_import ? 1 : 0
# NOTE(review): ".body" is the pre-3.x attribute name of the hashicorp/http
# provider; newer releases deprecate it in favor of "response_body" —
# confirm the pinned provider version before upgrading.
content = data.http.rancher_yaml_data[count.index].body
}
inputs.tf
# URL of the Rancher-generated import manifest used to register this
# cluster with Rancher (the /v3/import/... YAML endpoint).
variable "import_eks_rancher_yaml_url" {
type = string
default = ""
description = "URL of the Rancher-generated cluster registration manifest (/v3/import/...yaml)."
}
# Feature flag gating all Rancher-import resources in this module.
variable "enable_eks_rancher_import" {
type = bool
default = false
description = "When true, download the Rancher registration manifest and apply it to the EKS cluster."
}
providers.tf
# Entry from the terraform { required_providers { ... } } block (the
# enclosing block is not shown here): pins the community kubectl provider
# used to apply raw manifests to the cluster.
kubectl = {
source = "gavinbunney/kubectl"
version = "1.14.0"
}
# kubectl provider authenticated against the EKS cluster via the AWS CLI
# exec plugin, so no static kubeconfig/token needs to be stored.
provider "kubectl" {
host = local.kubernetes.host
cluster_ca_certificate = local.kubernetes.cluster_ca_certificate
exec {
api_version = "client.authentication.k8s.io/v1beta1"
# `aws eks get-token --cluster-name <name>` emits a short-lived
# ExecCredential for the named cluster.
args = ["eks", "get-token", "--cluster-name", var.cluster_name]
command = "aws"
}
}
terraform.tfvars
# Variable names in a .tfvars file must exactly match the declarations in
# inputs.tf — Terraform variable names are case-sensitive, so the previous
# upper-case keys (ENABLE_EKS_RANCHER_IMPORT / IMPORT_EKS_RANCHER_YAML_URL)
# did not bind to the declared lower-case variables.
enable_eks_rancher_import = true
import_eks_rancher_yaml_url = "https://test-rancher.beta.nuxeocloud.com/v3/import/6dmcjwh7fwbxns5v87x9sxv52rkg7jqdkphjwps6wfv449tv74rhk4_c-m-pdzbws6p.yaml"
When I run `terraform apply` for the first time, the cluster becomes active in the Rancher UI.
Then, the next time I run `terraform apply`, the Rancher agent gets disconnected from the cluster in the Rancher UI.
terraform apply output
# module.eks_post_config.kubectl_manifest.metrics_server["/apis/apps/v1/namespaces/cattle-system/deployments/cattle-cluster-agent"] will be updated in-place
~ resource "kubectl_manifest" "metrics_server" {
id = "/apis/apps/v1/namespaces/cattle-system/deployments/cattle-cluster-agent"
name = "cattle-cluster-agent"
~ yaml_incluster = (sensitive value)
# (14 unchanged attributes hidden)
}
# module.eks_post_config.local_file.rancher-agent-registration[0] must be replaced
-/+ resource "local_file" "rancher-agent-registration" {
~ content_base64sha256 = "kDcpe8B74ceAEt/cSh+MvI0d7/PwDL3vVjaFJn77210=" -> (known after apply)
~ content_base64sha512 = "kwUcIB6ywYv/3o/drOsXyaau/QL2cu4NAlHqMzw5THIcP6zFXzj1b3PgCPAqGsdrUgOaWOOvKPrvehd9CNuJfA==" -> (known after apply)
~ content_md5 = "6be6402179a0e7f7b5c15840a10d187a" -> (known after apply)
~ content_sha1 = "f6dc040c9c6db8e8edb1d7d97624ef76cf4e84d8" -> (known after apply)
~ content_sha256 = "9037297bc07be1c78012dfdc4a1f8cbc8d1deff3f00cbdef563685267efbdb5d" -> (known after apply)
~ content_sha512 = "93051c201eb2c18bffde8fddaceb17c9a6aefd02f672ee0d0251ea333c394c721c3facc55f38f56f73e008f02a1ac76b52039a58e3af28faef7a177d08db897c" -> (known after apply)
~ filename = "../../modules/eks_post_config/2024-05-07T10:22:17Z-rancher.yml" -> (known after apply) # forces replacement
~ id = "f6dc040c9c6db8e8edb1d7d97624ef76cf4e84d8" -> (known after apply)
# (3 unchanged attributes hidden)
}
It also updates the cattle-cluster-agent deployment. I am not able to understand why this happens. Is this a bug on Terraform's side?
Is there any solution to this?
I am using the generic option to import the clusters on Rancher so that multiple clusters can be imported.
Is there any solution to this? Please help me.
Reactions are currently unavailable
Metadata
Metadata
Assignees
Labels
No labels

