fixed TLS handshake error on agents

parent 52bb5f663f
commit c92868c054

.gitignore (vendored): 1 addition
@@ -6,3 +6,4 @@ kubeconfig.yaml-e
 terraform.tfvars
 plans.yaml
 traefik_config.yaml
+kured.yaml

@@ -44,11 +44,11 @@ resource "null_resource" "agents" {
   provisioner "file" {
     content = yamlencode({
       node-name = module.agents[each.key].name
-      server = "https://${local.first_control_plane_network_ipv4}:6443"
+      server = "https://${module.control_planes[0].private_ipv4_address}:6443"
       token = random_password.k3s_token.result
       kubelet-arg = "cloud-provider=external"
       flannel-iface = "eth1"
-      node-ip = module.agents[each.key].ipv4_address
+      node-ip = module.agents[each.key].private_ipv4_address
       node-label = var.automatically_upgrade_k3s ? ["k3s_upgrade=true"] : []
     })
     destination = "/tmp/config.yaml"
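
For context, a minimal, self-contained sketch of the agent configuration this provisioner renders after the change. The literal values are made-up placeholders standing in for the module outputs and the random token referenced in the diff; the snippet is an illustration, not part of the commit:

# Illustration only: the placeholder strings stand in for
# module.agents[each.key].name, module.control_planes[0].private_ipv4_address,
# module.agents[each.key].private_ipv4_address and random_password.k3s_token.result.
locals {
  example_agent_config = yamlencode({
    node-name     = "k3s-agent-0"
    server        = "https://10.0.0.2:6443" # first control plane, private address
    token         = "example-token"
    kubelet-arg   = "cloud-provider=external"
    flannel-iface = "eth1"
    node-ip       = "10.0.0.10"             # the agent's own private address
    node-label    = ["k3s_upgrade=true"]
  })
}

output "example_agent_config" {
  value = local.example_agent_config
}

The functional change in this hunk is node-ip, which now uses the agent's private_ipv4_address instead of ipv4_address; the server URL still points at the first control plane's private address, only without going through the removed local (see the locals hunk below).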

@@ -53,7 +53,6 @@ resource "null_resource" "control_planes" {
       kubelet-arg = "cloud-provider=external"
       node-ip = module.control_planes[count.index].private_ipv4_address
       advertise-address = module.control_planes[count.index].private_ipv4_address
-      tls-san = module.control_planes[count.index].private_ipv4_address
       node-taint = var.allow_scheduling_on_control_plane ? [] : ["node-role.kubernetes.io/master:NoSchedule"]
       node-label = var.automatically_upgrade_k3s ? ["k3s_upgrade=true"] : []
     })

init.tf: 1 deletion

@@ -18,7 +18,6 @@ resource "null_resource" "first_control_plane" {
       kubelet-arg = "cloud-provider=external"
       node-ip = module.control_planes[0].private_ipv4_address
       advertise-address = module.control_planes[0].private_ipv4_address
-      tls-san = module.control_planes[0].private_ipv4_address
       node-taint = var.allow_scheduling_on_control_plane ? [] : ["node-role.kubernetes.io/master:NoSchedule"]
       node-label = var.automatically_upgrade_k3s ? ["k3s_upgrade=true"] : []
     })
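
Both server-side hunks (the control_planes resource above and first_control_plane in init.tf) drop the same line: an explicit tls-san entry that duplicated the address already passed as node-ip and advertise-address. The commit does not spell out the reasoning; the reading here, an assumption rather than something stated in the diff, is that the duplicate entry was redundant. For reference, a sketch of the resulting control-plane configuration with a placeholder address, again not part of the commit:

# Illustration only: 10.0.0.2 is a placeholder for
# module.control_planes[...].private_ipv4_address; no explicit tls-san remains.
locals {
  example_server_config = yamlencode({
    kubelet-arg       = "cloud-provider=external"
    node-ip           = "10.0.0.2"
    advertise-address = "10.0.0.2"
    node-taint        = ["node-role.kubernetes.io/master:NoSchedule"]
    node-label        = ["k3s_upgrade=true"]
  })
}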

@@ -1,6 +1,4 @@
 locals {
-  first_control_plane_network_ipv4 = module.control_planes[0].private_ipv4_address
-
   ssh_public_key = trimspace(file(var.public_key))
   # ssh_private_key is either the contents of var.private_key or null to use a ssh agent.
   ssh_private_key = var.private_key == null ? null : trimspace(file(var.private_key))