terraform-aws-eks-blueprints
[Bug]: Adot/OpenTelemetry & Cert-Manager IAM policy issue
Welcome to Amazon EKS Blueprints!
- [X] Yes, I've searched similar issues on GitHub and didn't find any.
Amazon EKS Blueprints Release version
4.6.1
What is your environment, configuration and the example used?
- Cluster version: 1.21
- Tested against tags v4.5.0, v4.6.0 & v4.6.1
What did you do and What did you see instead?
- Upgraded the eks-blueprints module from version 4.0.9 to 4.6.1 and am facing these issues:
- Prometheus is stuck in pending-install.
- Ingress-Nginx fails to create its namespace unless create_namespace = true is added to its Helm config.
- OpenTelemetry and cert-manager fail with the errors below when both are enabled (true); one of them has to be sacrificed for the deployment to succeed. See the workaround sketch after the error output.
Error: error creating IAM Policy fabric-stg-ops-eks-cert-manager-irsa: EntityAlreadyExists: A policy called fabric-stg-ops-eks-cert-manager-irsa already exists. Duplicate names are not allowed. status code: 409, request id: d35f5501-bb5b-4832-adc7-75eaf9391cd2
with module.eks_addons_0.module.opentelemetry_operator[0].module.cert_manager.aws_iam_policy.cert_manager
on .terraform/modules/eks_addons_0/modules/kubernetes-addons/cert-manager/main.tf line 42, in resource "aws_iam_policy" "cert_manager":
resource "aws_iam_policy" "cert_manager" {
Error: namespaces "opentelemetry-operator-system" already exists
with module.eks_addons_0.module.opentelemetry_operator[0].kubernetes_namespace_v1.adot[0]
on .terraform/modules/eks_addons_0/modules/kubernetes-addons/opentelemetry-operator/main.tf line 10, in resource "kubernetes_namespace_v1" "adot":
resource "kubernetes_namespace_v1" "adot" {
Additional Information
- Terraform Cloud and macOS
- Terraform version 1.1.9
- versions.tf:
# ---------------------------------------------------------------------------------------------------------------------
# Terraform version constraints
# ---------------------------------------------------------------------------------------------------------------------
terraform {
required_version = ">= 1.0.0"
required_providers {
aws = {
source = "hashicorp/aws"
version = ">= 3.72, >= 4.10"
}
kubernetes = {
source = "hashicorp/kubernetes"
version = ">= 2.10"
}
helm = {
source = "hashicorp/helm"
version = ">= 2.4.1"
}
local = {
source = "hashicorp/local"
version = ">= 2.1"
}
null = {
source = "hashicorp/null"
version = ">= 3.1"
}
http = {
source = "terraform-aws-modules/http"
version = "2.4.1"
}
kubectl = {
source = "gavinbunney/kubectl"
version = ">= 1.14"
}
random = {
source = "hashicorp/random"
version = ">= 2.2"
}
awsutils = {
source = "cloudposse/awsutils"
version = ">= 0.11.0"
}
tfe = {
source = "hashicorp/tfe"
version = "~> 0.30.2"
}
grafana = {
source = "grafana/grafana"
version = ">= 1.13.3"
}
# tls = {
# source = "hashicorp/tls"
# version = "3.4.0"
# }
}
}
EKS-Addons Example:
module "eks_addons_0" {
# source = "git::ssh://[email protected]/b7hio/terraform-aws-eks-blueprints.git?ref=v4.6.1//modules/kubernetes-addons"
source = "git::ssh://[email protected]/b7hio/terraform-aws-eks-blueprints.git//modules/kubernetes-addons?ref=v4.6.1"
eks_cluster_id = module.eks_0.eks_cluster_id
eks_cluster_endpoint = module.eks_0.eks_cluster_endpoint
eks_oidc_provider = module.eks_0.oidc_provider
eks_cluster_version = module.eks_0.eks_cluster_version
eks_cluster_domain = var.hosted_name
#---------------------------------------------------------------
# Amazon EKS Managed ADD-ON
#---------------------------------------------------------------
## VPC-CNI
enable_amazon_eks_vpc_cni = true
amazon_eks_vpc_cni_config = {
addon_version = data.aws_eks_addon_version.latest["vpc-cni"].version
resolve_conflicts = "OVERWRITE"
}
## CoreDNS
enable_amazon_eks_coredns = true
amazon_eks_coredns_config = {
addon_version = data.aws_eks_addon_version.latest["coredns"].version
resolve_conflicts = "OVERWRITE"
}
## Kube-Proxy
enable_amazon_eks_kube_proxy = true
amazon_eks_kube_proxy_config = {
addon_version = data.aws_eks_addon_version.default["kube-proxy"].version
resolve_conflicts = "OVERWRITE"
}
## EBS-CSI Driver
enable_amazon_eks_aws_ebs_csi_driver = false
#---------------------------------------------------------------
# K8s ADD-ONS
#---------------------------------------------------------------
enable_external_secrets = true
external_secrets_helm_config = {
chart = "external-secrets"
repository = "https://charts.external-secrets.io/"
version = "0.5.6"
}
enable_external_dns = true
external_dns_helm_config = {
chart = "external-dns"
repository = "https://charts.bitnami.com/bitnami"
version = "6.1.6"
}
enable_cluster_autoscaler = true
cluster_autoscaler_helm_config = {
chart = "cluster-autoscaler"
repository = "https://kubernetes.github.io/autoscaler"
version = "9.15.0"
set = [
{
name = "extraArgs.expander"
value = "priority"
},
{
name = "expanderPriorities"
value = <<-EOT
100:
- .*-spot-2vcpu-8mem.*
90:
- .*-spot-4vcpu-16mem.*
10:
- .*
EOT
}
]
}
# Prometheus and Amazon Managed Prometheus integration
enable_opentelemetry_operator = true
enable_adot_collector_java = false
enable_adot_collector_nginx = true
enable_prometheus = true
prometheus_helm_config = {
chart = "prometheus"
repository = "https://prometheus-community.github.io/helm-charts"
version = "15.3.0"
namespace = "prometheus"
}
enable_amazon_prometheus = true
amazon_prometheus_workspace_endpoint = module.managed_prometheus.workspace_prometheus_endpoint
amazon_prometheus_workspace_region = var.aws_region
enable_aws_for_fluentbit = true
aws_for_fluentbit_helm_config = {
name = "aws-for-fluent-bit"
chart = "aws-for-fluent-bit"
repository = "https://aws.github.io/eks-charts"
version = "0.1.11"
namespace = "logging"
aws_for_fluent_bit_cw_log_group = "/${module.eks_0.eks_cluster_id}/worker-fluentbit-logs" # Optional
aws_for_fluentbit_cwlog_retention_in_days = 90
create_namespace = true
values = [templatefile("${path.module}/helm_values/aws-for-fluentbit-values.yaml", {
region = var.aws_region
aws_for_fluent_bit_cw_log_group = "/${module.eks_0.eks_cluster_id}/worker-fluentbit-logs"
})]
set = [
{
name = "nodeSelector.kubernetes\\.io/os"
value = "linux"
}
]
}
enable_argo_rollouts = false
argo_rollouts_helm_config = {
name = "argo-rollouts"
chart = "argo-rollouts"
repository = "https://argoproj.github.io/argo-helm"
version = "2.14.0"
namespace = "argo-rollouts"
}
enable_metrics_server = true
metrics_server_helm_config = {
chart = "metrics-server"
repository = "https://kubernetes-sigs.github.io/metrics-server/"
version = "3.8.1"
}
enable_cert_manager = true
cert_manager_helm_config = {
chart = "cert-manager"
repository = "https://charts.jetstack.io"
version = "v1.7.1"
}
enable_keda = true
keda_helm_config = {
chart = "keda"
repository = "https://kedacore.github.io/charts"
version = "2.6.2"
}
enable_aws_load_balancer_controller = true
aws_load_balancer_controller_helm_config = {
chart = "aws-load-balancer-controller"
repository = "https://aws.github.io/eks-charts"
version = "1.4.1"
}
enable_vpa = true
vpa_helm_config = {
name = "vpa"
chart = "vpa"
repository = "https://charts.fairwinds.com/stable"
version = "1.0.0"
namespace = "vpa"
values = [templatefile("${path.module}/helm_values/vpa-values.yaml", {})]
}
enable_ingress_nginx = true
ingress_nginx_helm_config = {
create_namespace = true
values = [templatefile("${path.module}/helm_values/nginx-values.yaml", {
hostname = var.hosted_name
ssl_cert_arn = module.stg_acm.acm_certificate_arn
})]
}
enable_tetrate_istio = true
tetrate_istio_version = "1.13.5"
tetrate_istio_base_helm_config = {
}
# depends_on = [
# module.eks_0.managed_node_groups,
# module.vpc_0
# ]
}
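For context, the amazon_eks_*_config blocks above reference aws_eks_addon_version data sources that were not included in this report. A minimal sketch of what those blocks might look like (the for_each keys and most_recent flags are my assumptions):
# Hypothetical reconstruction of the data sources referenced by the
# amazon_eks_*_config blocks above; not part of the original report.
data "aws_eks_addon_version" "latest" {
  for_each = toset(["vpc-cni", "coredns"])

  addon_name         = each.value
  kubernetes_version = module.eks_0.eks_cluster_version
  most_recent        = true # resolve the newest available add-on version
}

data "aws_eks_addon_version" "default" {
  for_each = toset(["kube-proxy"])

  addon_name         = each.value
  kubernetes_version = module.eks_0.eks_cluster_version
  most_recent        = false # resolve the default add-on version
}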
Hello, this issue is blocking our observability rollout. Please take a look. Thanks!