[WIP] nodepools feature

jodhi 2022-02-23 22:46:46 +07:00
parent 054782d6be
commit 7d301f2c6d
5 changed files with 38 additions and 18 deletions

View File

@@ -1,9 +1,9 @@
 module "agents" {
   source = "./modules/host"
-  count = var.agents_num
-  name = "k3s-agent-${count.index}"
+  for_each = local.agent_nodepools
+  name = each.key
   ssh_keys = [hcloud_ssh_key.k3s.id]
   public_key = var.public_key
   private_key = var.private_key
@@ -12,8 +12,8 @@ module "agents" {
   placement_group_id = hcloud_placement_group.k3s.id
   location = var.location
   network_id = hcloud_network.k3s.id
-  ip = cidrhost(hcloud_network_subnet.k3s.ip_range, 513 + count.index)
-  server_type = var.agent_server_type
+  ip = cidrhost(hcloud_network_subnet.k3s.ip_range, 513 + each.value.index)
+  server_type = each.value.server_type
   labels = {
     "provisioner" = "terraform",
@@ -24,28 +24,28 @@ module "agents" {
 }

 resource "null_resource" "agents" {
-  count = var.agents_num
+  for_each = local.agent_nodepools
   triggers = {
-    agent_id = module.agents[count.index].id
+    agent_id = module.agents[each.key].id
   }
   connection {
     user = "root"
     private_key = local.ssh_private_key
     agent_identity = local.ssh_identity
-    host = module.agents[count.index].ipv4_address
+    host = module.agents[each.key].ipv4_address
   }
   # Generating k3s agent config file
   provisioner "file" {
     content = yamlencode({
-      node-name = module.agents[count.index].name
+      node-name = module.agents[each.key].name
       server = "https://${local.first_control_plane_network_ip}:6443"
       token = random_password.k3s_token.result
       kubelet-arg = "cloud-provider=external"
       flannel-iface = "eth1"
-      node-ip = cidrhost(hcloud_network_subnet.k3s.ip_range, 513 + count.index)
+      node-ip = cidrhost(hcloud_network_subnet.k3s.ip_range, 513 + each.value.index)
       node-label = var.automatically_upgrade_k3s ? ["k3s_upgrade=true"] : []
     })
     destination = "/tmp/config.yaml"
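Note: moving the agents module from `count` to `for_each` keys every instance by its nodepool map key instead of a bare index, so adding or removing a pool no longer renumbers (and therefore recreates) unrelated agents. A minimal sketch of the resulting addresses, assuming the example `big` pool from the tfvars further down (illustrative only, not part of this diff):

  # module.agents["big-0"]   # node name "big-0" (was "k3s-agent-0" under count)
  # module.agents["big-1"]   # node name "big-1"
  # module.agents["big-2"]   # node name "big-2"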

View File

@@ -30,4 +30,14 @@ locals {
   install_k3s_server = concat(local.common_commands_install_k3s, ["curl -sfL https://get.k3s.io | INSTALL_K3S_SKIP_SELINUX_RPM=true INSTALL_K3S_SKIP_START=true INSTALL_K3S_EXEC=server sh -"])
   install_k3s_agent = concat(local.common_commands_install_k3s, ["curl -sfL https://get.k3s.io | INSTALL_K3S_SKIP_SELINUX_RPM=true INSTALL_K3S_SKIP_START=true INSTALL_K3S_EXEC=agent sh -"])

+  agent_nodepools = merge([
+    for nodepool_name, nodepool_obj in var.agent_nodepools : {
+      for index in range(nodepool_obj.count) :
+      format("%s-%s", nodepool_name, index) => {
+        index : index, # kept just for compatibility with the previous structure
+        server_type : nodepool_obj.server_type
+      }
+    }
+  ]...)
+
 }
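The `merge([ for ... ]...)` expression flattens the per-pool maps into one map keyed by `<pool>-<index>` (the trailing `...` expands the list into separate arguments for `merge`), which is exactly what the `for_each` in the agents module consumes. An illustrative sketch of the result, assuming the example tfvars further down (`big` pool, `cpx31`, count 3):

  # local.agent_nodepools = {
  #   "big-0" = { index = 0, server_type = "cpx31" }
  #   "big-1" = { index = 1, server_type = "cpx31" }
  #   "big-2" = { index = 2, server_type = "cpx31" }
  # }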

View File

@@ -4,7 +4,9 @@ output "controlplanes_public_ip" {
 }

 output "agents_public_ip" {
-  value = module.agents.*.ipv4_address
+  value = [
+    for obj in module.agents : obj.ipv4_address
+  ]
   description = "The public IP addresses of the agent server."
 }

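Because the module now uses `for_each`, `module.agents` is a map of objects and the `.*.` splat no longer applies, hence the `for` expression. An equivalent alternative, sketched here only as an assumption about style and not what this commit uses:

  # value = values(module.agents)[*].ipv4_address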

View File

@@ -9,7 +9,6 @@ private_key = "/home/username/.ssh/id_ed25519"

 # For Hetzner server types see https://www.hetzner.com/cloud
 location = "fsn1"             # change to `ash` for us-east Ashburn, Virginia location
 network_region = "eu-central" # change to `us-east` if location is ash
-agent_server_type = "cpx21"
 control_plane_server_type = "cpx11"
 lb_server_type = "lb11"
@@ -17,7 +16,17 @@ lb_server_type = "lb11"
 servers_num = 3

 # For agent nodes, at least 2 is recommended for HA, but you can keep automatic upgrades.
-agents_num = 2
+agent_nodepools = {
+  big = {
+    server_type = "cpx31",
+    count       = 3
+  }
+  # Will fail because of the ip address collision (we use index number)
+  # small = {
+  #   server_type = "cpx31",
+  #   count       = 3
+  # }
+}

 # If you want to use a specific Hetzner CCM and CSI version, set them below, otherwise leave as is for the latest versions
 # hetzner_ccm_version = ""
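The commented-out `small` pool above documents the current WIP limitation: each pool's `index` restarts at 0 and the host address is derived from `513 + each.value.index` alone, so the n-th node of every pool resolves to the same IP. A worked example, assuming a subnet range of `10.0.0.0/16` (the real `hcloud_network_subnet.k3s.ip_range` is not shown in this diff):

  # cidrhost("10.0.0.0/16", 513 + 0) = "10.0.2.1"   # big-0
  # cidrhost("10.0.0.0/16", 513 + 0) = "10.0.2.1"   # small-0 -> collision
  # cidrhost("10.0.0.0/16", 513 + 1) = "10.0.2.2"   # big-1 and small-1 collide as well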

View File

@@ -35,10 +35,6 @@ variable "control_plane_server_type" {
   type = string
 }

-variable "agent_server_type" {
-  description = "Default agent server type"
-  type        = string
-}

 variable "lb_server_type" {
   description = "Default load balancer server type"
@@ -56,9 +52,12 @@ variable "servers_num" {
   type = number
 }

-variable "agents_num" {
+variable "agent_nodepools" {
   description = "Number of agent nodes."
-  type        = number
+  type = map(object({
+    server_type = string
+    count       = number
+  }))
 }

 variable "hetzner_ccm_version" {