diff --git a/main.tf b/main.tf
index b9087b6..2443674 100644
--- a/main.tf
+++ b/main.tf
@@ -199,6 +199,10 @@ module "eks" {
   aws_auth_users = local.eks_aws_auth_users
 
   tags = var.default_tags
+
+  depends_on = [
+    module.vpc
+  ]
 }
 
 # A resource to create a Kubernetes namespace.
@@ -208,6 +212,17 @@ resource "kubernetes_namespace_v1" "this" {
   metadata {
     name = var.seqera_namespace_name
   }
+
+  depends_on = [
+    module.eks
+  ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # A resource to create a Kubernetes service account within the specified namespace.
@@ -224,7 +239,17 @@ resource "kubernetes_service_account_v1" "this" {
 
   automount_service_account_token = true
 
-  depends_on = [kubernetes_namespace_v1.this] # Ensures the namespace is created before the service account.
+  depends_on = [
+    module.eks,
+    kubernetes_namespace_v1.this
+  ] # Ensures the namespace is created before the service account.
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # DB Secret
@@ -245,6 +270,13 @@ resource "kubernetes_secret_v1" "db_app_password" {
   depends_on = [
     module.eks
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # Config Map
@@ -265,17 +297,24 @@ resource "kubernetes_config_map_v1" "tower_app_configmap" {
     module.redis,
     module.eks
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 ## This local is used to control the password values passed to the db setup job.
 locals {
-  db_root_password = var.db_root_password != "" ? var.db_root_password : random_password.db_root_password[0].result
-  db_app_password  = var.db_app_password != "" ? var.db_app_password : random_password.db_app_password[0].result
+  db_root_password = var.db_root_password != "" ? var.db_root_password : try(random_password.db_root_password[0].result, null)
+  db_app_password  = var.db_app_password != "" ? var.db_app_password : try(random_password.db_app_password[0].result, null)
 }
 
 # This resource creates a kubernetes that will provision the Seqera user in the DB with the required permissions.
 resource "kubernetes_job_v1" "seqera_schema_job" {
-  count = var.create_db_cluster && var.create_db_cluster && var.create_eks_cluster ? 1 : 0
+  count = var.create_db_cluster && var.create_eks_cluster ? 1 : 0
   metadata {
     name      = var.db_setup_job_name
     namespace = var.seqera_namespace_name
@@ -325,6 +364,13 @@ resource "kubernetes_job_v1" "seqera_schema_job" {
     module.eks,
     module.db
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # A Helm release resource for deploying the AWS cluster autoscaler using the Helm package manager.
@@ -353,6 +399,18 @@ resource "helm_release" "aws_cluster_autoscaler" {
   depends_on = [
     module.eks
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+
+    postcondition {
+      condition     = self.status == "deployed"
+      error_message = "Failed to deploy the AWS cluster autoscaler Helm chart."
+    }
+  }
 }
 
 # A Helm release resource for deploying the AWS EBS CSI driver using the Helm package manager.
@@ -372,6 +430,18 @@ resource "helm_release" "aws-ebs-csi-driver" {
     module.eks,
     module.aws_ebs_csi_driver_iam_policy
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+
+    postcondition {
+      condition     = self.status == "deployed"
+      error_message = "Failed to deploy the AWS EBS CSI driver Helm chart."
+    }
+  }
 }
 
 # Customer Resource Definition (CRD) for the AWS Load Balancer Controller.
@@ -973,6 +1043,13 @@ YAML
   depends_on = [
     module.eks
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # A Helm release resource for deploying the AWS Load Balancer Controller using the Helm package manager.
@@ -1012,6 +1089,18 @@ resource "helm_release" "aws-load-balancer-controller" {
     module.eks,
     kubectl_manifest.aws_loadbalancer_controller_crd
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+
+    postcondition {
+      condition     = self.status == "deployed"
+      error_message = "Failed to deploy the AWS Load Balancer Controller Helm chart."
+    }
+  }
 }
 
 # This resource creates an AWS Elastic File System (EFS) specifically for the EKS cluster.
@@ -1077,6 +1166,13 @@ resource "kubernetes_storage_class" "efs_storage_class" {
   depends_on = [
     module.eks # Ensures that the EKS module is completely applied before this resource.
   ]
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+  }
 }
 
 # This resource installs the AWS EFS CSI driver using Helm, a package manager for Kubernetes.
@@ -1095,6 +1191,18 @@ resource "helm_release" "aws-efs-csi-driver" {
     name  = "controller.serviceAccount.create" # Configuration parameter for the Helm chart.
     value = true # Sets the `controller.serviceAccount.create` parameter value to true.
   }
+
+  lifecycle {
+    precondition {
+      condition     = module.eks.cluster_status == "ACTIVE"
+      error_message = "The EKS cluster is not active."
+    }
+
+    postcondition {
+      condition     = self.status == "deployed"
+      error_message = "Failed to deploy the AWS EFS CSI driver Helm chart."
+    }
+  }
 }
 
 # This module creates a security group specifically for the database (DB) cluster.