Skip to content

Commit

Permalink
[#77] Argo: EKS WiP
Browse files Browse the repository at this point in the history
  • Loading branch information
Dmitriy Karbyshev committed Apr 12, 2021
1 parent aa1c153 commit 34c64fb
Show file tree
Hide file tree
Showing 10 changed files with 185 additions and 10 deletions.
2 changes: 1 addition & 1 deletion terraform/env_types/aws/eks/eks_create/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ module "eks" {
k8s_version = var.k8s_version
cluster_name = var.cluster_name
vpc_id = module.vpc.vpc_id
node_pools = var.node_pools
node_pools = try(merge(var.node_pools, var.argo.node_pool), var.node_pools)
master_role_arn = module.iam.master_role_arn
master_sg_id = module.firewall.master_sg_id
node_role_arn = module.iam.node_role_arn
Expand Down
38 changes: 38 additions & 0 deletions terraform/env_types/aws/eks/eks_create/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -88,3 +88,41 @@ variable "bastion_hostname" {
default = "bastion"
description = "Bastion hostname"
}

# Argo Workflows deployment configuration for the EKS create stage.
# `node_pool` is typed `any` so an arbitrary EKS node-group map can be
# merged into var.node_pools (see eks_create/main.tf: try(merge(...))).
variable "argo" {
type = object({
enabled = bool # toggles Argo components; default below keeps them off
namespace = string # namespace for the Argo control plane
workflows_namespace = string # namespace where workflow pods run
artifact_bucket = string # "" means "derive a bucket name elsewhere"
node_pool = any # free-form node-group map merged into var.node_pools
})
default = {
enabled = false
namespace = "argo"
workflows_namespace = "argo-workflows"
artifact_bucket = ""
# Dedicated, scale-to-zero node group for workflow pods.
node_pool = {
argo-workflows = {
init_node_count = 0
min_node_count = 0
max_node_count = 1
preemptible = true
machine_type = "m5.large"
disk_size_gb = 40
labels = {
machine_type = "m5.large"
mode = "argo-workflows"
}
# Keep non-Argo workloads off this pool; workflow pods must
# tolerate dedicated=argo:NO_SCHEDULE.
taints = [
{
key = "dedicated"
effect = "NO_SCHEDULE"
value = "argo"
}
]
}
}
}
description = "Argo configuration"
}
13 changes: 9 additions & 4 deletions terraform/env_types/aws/eks/k8s_setup/locals.tf
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,18 @@ locals {

is_lb_an_ip = length(regexall("^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$", module.nginx_ingress_prereqs.load_balancer_ip)) > 0

databases = [
argo_db = var.argo.enabled ? "argo" : ""

argo_bucket_name = var.argo.artifact_bucket == "" ? "${var.cluster_name}-argo-artifacts" : var.argo.artifact_bucket

argo_artifact_bucket_name = var.argo.enabled ? local.argo_bucket_name : ""

databases = compact(concat([
"airflow",
"mlflow",
"jupyterhub",
"vault",
var.odahu_database,
"grafana",
"argo"
]
"grafana"
], [local.argo_db]))
}
47 changes: 47 additions & 0 deletions terraform/env_types/aws/eks/k8s_setup/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -180,6 +180,7 @@ module "odahuflow_prereqs" {
cluster_name = var.cluster_name
kms_key_arn = var.kms_key_arn
data_bucket = var.data_bucket
argo_artifact_bucket = ""
log_bucket = var.log_bucket
log_expiration_days = var.log_expiration_days
openid_connect_provider = module.irsa.openid_connect_provider
Expand Down Expand Up @@ -250,6 +251,51 @@ module "storage-syncer" {
depends_on = [module.airflow]
}

#module "argo_workflow_prereqs" {
# count = var.argo.enabled ? 1 : 0
# source = "../../../../modules/k8s/argo/prereqs/eks"
#
# bucket = module.odahuflow_prereqs.argo_artifact_bucket_name
# cluster_name = var.cluster_name
# namespace = var.argo.namespace
# workflows_namespace = var.argo.workflows_namespace
# region = var.aws_region
# kms_key_arn = var.kms_key_arn
# openid_connect_provider = module.irsa.openid_connect_provider
#
# depends_on = [module.odahuflow_prereqs]
#}

# MinIO object storage, deployed unconditionally (no count/enabled gate,
# unlike the commented-out Argo modules nearby), exposed on the cluster
# domain with the shared TLS material.
module "minio" {
source = "../../../../modules/k8s/minio"

cluster_domain = var.domain
tls_secret_key = var.tls_key
tls_secret_crt = var.tls_crt
}

#module "argo_workflow" {
# count = var.argo.enabled ? 1 : 0
# source = "../../../../modules/k8s/argo/helm"
#
# cluster_domain = var.domain
# configuration = merge(var.argo, { artifact_bucket = module.odahuflow_prereqs.argo_artifact_bucket_name })
# sa_annotations = module.argo_workflow_prereqs[0].argo_sa_annotations
# artifact_repository_config = module.argo_workflow_prereqs[0].argo_artifact_repository_config
# tls_secret_crt = var.tls_crt
# tls_secret_key = var.tls_key
# pgsql = {
# enabled = var.postgres.enabled
# db_host = module.postgresql.pgsql_endpoint
# db_name = "argo"
# db_user = ""
# db_password = ""
# secret_namespace = module.postgresql.pgsql_credentials["argo"].namespace
# secret_name = module.postgresql.pgsql_credentials["argo"].secret
# }
# depends_on = [module.postgresql, module.argo_workflow_prereqs[0]]
#}

module "fluentd" {
source = "../../../../modules/k8s/fluentd"

Expand Down Expand Up @@ -377,6 +423,7 @@ module "odahuflow_helm" {
extra_external_urls = concat(
module.jupyterhub.external_url,
module.airflow.external_url,
# module.argo_workflow[0].external_url,
module.odahuflow_prereqs.extra_external_urls
)

Expand Down
39 changes: 39 additions & 0 deletions terraform/env_types/aws/eks/k8s_setup/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -504,3 +504,42 @@ variable "opa" {
}
}
}

# Argo Workflows configuration for the k8s_setup stage.
# Kept structurally identical (same attribute order, same defaults) to the
# `argo` variable declared in eks_create/variables.tf so both stages accept
# the same value; the original had a stray blank line and a different
# attribute order inside the object type.
variable "argo" {
  type = object({
    enabled             = bool   # toggles Argo components; default below keeps them off
    namespace           = string # namespace for the Argo control plane
    workflows_namespace = string # namespace where workflow pods run
    artifact_bucket     = string # "" means "derive a bucket name elsewhere"
    node_pool           = any    # free-form node-group map
  })
  default = {
    enabled             = false
    namespace           = "argo"
    workflows_namespace = "argo-workflows"
    artifact_bucket     = ""
    # Dedicated, scale-to-zero node group for workflow pods.
    node_pool = {
      argo-workflows = {
        init_node_count = 0
        min_node_count  = 0
        max_node_count  = 1
        preemptible     = true
        machine_type    = "m5.large"
        disk_size_gb    = 40
        labels = {
          machine_type = "m5.large"
          mode         = "argo-workflows"
        }
        # Keep non-Argo workloads off this pool; workflow pods must
        # tolerate dedicated=argo:NO_SCHEDULE.
        taints = [
          {
            key    = "dedicated"
            effect = "NO_SCHEDULE"
            value  = "argo"
          }
        ]
      }
    }
  }
  description = "Argo configuration"
}
4 changes: 4 additions & 0 deletions terraform/env_types/gcp/gke/k8s_setup/locals.tf
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,10 @@ locals {

argo_db = var.argo.enabled ? "argo" : ""

argo_bucket_name = var.argo.artifact_bucket == "" ? "${var.cluster_name}-argo-artifacts" : var.argo.artifact_bucket

argo_artifact_bucket_name = var.argo.enabled ? local.argo_bucket_name : ""

databases = compact(concat([
"airflow",
"mlflow",
Expand Down
15 changes: 10 additions & 5 deletions terraform/env_types/gcp/gke/k8s_setup/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ module "odahuflow_prereqs" {
kms_key_id = var.kms_key_id
data_bucket = var.data_bucket
log_bucket = var.log_bucket
argo_artifact_bucket = var.argo.artifact_bucket
argo_artifact_bucket = local.argo_artifact_bucket_name
log_expiration_days = var.log_expiration_days
uniform_bucket_level_access = var.uniform_bucket_level_access
fluentd_resources = var.fluentd_resources
Expand Down Expand Up @@ -234,6 +234,8 @@ module "airflow" {
}

module "argo_workflow_prereqs" {
count = var.argo.enabled ? 1 : 0

source = "../../../../modules/k8s/argo/prereqs/gke"
cluster_name = var.cluster_name
bucket = module.odahuflow_prereqs.argo_artifact_bucket_name
Expand All @@ -246,10 +248,13 @@ module "argo_workflow_prereqs" {
}

module "argo_workflow" {
source = "../../../../modules/k8s/argo/main"
count = var.argo.enabled ? 1 : 0

source = "../../../../modules/k8s/argo/helm"
cluster_domain = var.cluster_domain_name
configuration = merge(var.argo, { artifact_bucket = module.odahuflow_prereqs.argo_artifact_bucket_name })
workflows_sa = module.argo_workflow_prereqs.argo_workflows_sa
sa_annotations = module.argo_workflow_prereqs[0].argo_sa_annotations
artifact_repository_config = module.argo_workflow_prereqs[0].argo_artifact_repository_config
tls_secret_crt = var.tls_crt
tls_secret_key = var.tls_key
pgsql = {
Expand All @@ -261,7 +266,7 @@ module "argo_workflow" {
secret_namespace = module.postgresql.pgsql_credentials["argo"].namespace
secret_name = module.postgresql.pgsql_credentials["argo"].secret
}
depends_on = [module.postgresql, module.argo_workflow_prereqs]
depends_on = [module.postgresql, module.argo_workflow_prereqs[0]]
}

module "storage-syncer" {
Expand Down Expand Up @@ -396,7 +401,7 @@ module "odahuflow_helm" {
extra_external_urls = concat(
module.jupyterhub.external_url,
module.airflow.external_url,
module.argo_workflow.external_url,
module.argo_workflow[0].external_url,
module.elasticsearch.external_url,
module.odahuflow_prereqs.extra_external_urls
)
Expand Down
27 changes: 27 additions & 0 deletions terraform/modules/odahuflow/prereqs/eks/main.tf
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,33 @@ resource "aws_ecr_repository" "this" {
name = var.cluster_name
}

########################################################
# S3 Argo artifacts bucket
########################################################

# Created only when an artifact bucket name is supplied; an empty
# var.argo_artifact_bucket (the default) yields zero instances.
resource "aws_s3_bucket" "argo_artifacts" {
  count = var.argo_artifact_bucket == "" ? 0 : 1

  bucket = var.argo_artifact_bucket
  acl    = "private"
  # NOTE: the `region` argument was removed here — on AWS provider v3+ it is
  # a read-only attribute of aws_s3_bucket and setting it is an error; the
  # bucket is created in the provider's configured region either way.
  force_destroy = true

  # Encrypt artifacts with the cluster KMS key; basename() strips the ARN
  # prefix to pass the bare key id.
  server_side_encryption_configuration {
    rule {
      apply_server_side_encryption_by_default {
        kms_master_key_id = basename(var.kms_key_arn)
        sse_algorithm     = "aws:kms"
      }
    }
  }

  tags = {
    Name = var.argo_artifact_bucket
    Env  = var.cluster_name
  }
}

########################################################
# AWS IAM User for Fluentd
########################################################
Expand Down
4 changes: 4 additions & 0 deletions terraform/modules/odahuflow/prereqs/eks/output.tf
Original file line number Diff line number Diff line change
Expand Up @@ -94,3 +94,7 @@ output "jupyter_notebook_sa_annotations" {
"eks.amazonaws.com/role-arn" = aws_iam_role.jupyter_notebook.arn
}
}

# Empty string when the artifacts bucket is disabled (count == 0 in main.tf),
# otherwise the created bucket's name. The `description` attribute was added
# for consistency with the module's documented variables.
output "argo_artifact_bucket_name" {
  description = "Name of the S3 bucket used for Argo workflow artifacts; empty string when disabled"
  value       = var.argo_artifact_bucket == "" ? "" : aws_s3_bucket.argo_artifacts[0].bucket
}
6 changes: 6 additions & 0 deletions terraform/modules/odahuflow/prereqs/eks/variables.tf
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,12 @@ variable "log_bucket" {
description = "ODAHU flow logs storage bucket"
}

# Name of the S3 bucket for Argo workflow artifacts. The empty-string
# default disables bucket creation (main.tf sets count = 0 when "").
variable "argo_artifact_bucket" {
type = string
default = ""
description = "Argo artifacts bucket"
}

variable "log_expiration_days" {
type = number
default = 1
Expand Down

0 comments on commit 34c64fb

Please sign in to comment.