kubespray
setup
Prepare three or more VMs (see multipass). Since multipass is assumed, this guide is written for ubuntu.
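A sketch of creating the three VMs with multipass; the names and sizes here are assumptions:
multipass launch -n pi0 -c 2 -m 4G -d 20G # name, cpus, memory, disk
multipass launch -n pi1 -c 2 -m 4G -d 20G
multipass launch -n pi2 -c 2 -m 4G -d 20G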
dns
sudo vi /etc/hosts
# add entries so that all three nodes can be reached by name
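For example, appending entries like these (the ips are placeholders; check the real addresses with `multipass list`):
cat <<'EOF' | sudo tee -a /etc/hosts
192.168.0.74 pi0
192.168.0.77 pi1
192.168.0.78 pi2
EOF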
configure access permission
ssh-keygen
ssh-copy-id [name - the name written in /etc/hosts]
The local machine is also an ansible target, so the key must be copied to it as well.
If that fails:
ssh-keygen
cat ~/.ssh/id_rsa.pub # copy the output
vi ~/.ssh/authorized_keys # append as the last line; do this for the control plane and every node configured in /etc/hosts
ssh [name - the name written in /etc/hosts] # verify the connection
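A quick loop to confirm passwordless access to every node before running ansible (host names assumed from /etc/hosts):
for h in pi0 pi1 pi2; do ssh -o BatchMode=yes "$h" hostname; done # BatchMode fails instead of prompting for a password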
configure kubespray
git clone -b v2.22.0 https://github.com/kubernetes-sigs/kubespray.git
cd kubespray
sudo apt install python3-pip -y
sudo pip3 install -r requirements.txt
cp -r inventory/sample inventory/pi
vi inventory/pi/hosts.yml # see the kubespray docs for the settings
vi inventory/pi/group_vars/k8s_cluster/k8s-cluster.yml # see the kubespray docs for the settings
ansible-playbook -i inventory/pi/hosts.yml --become --become-user=root -v cluster.yml
inventory/pi/hosts.yml:
all:
  hosts:
    pi0:
      ansible_host: pi0
    pi1:
      ansible_host: pi1
    pi2:
      ansible_host: pi2
  children:
    kube_control_plane:
      hosts:
        pi0:
        pi1:
        pi2:
    kube_node:
      hosts:
        pi0:
        pi1:
        pi2:
    etcd:
      hosts:
        pi0:
        pi1:
        pi2:
    k8s_cluster:
      children:
        kube_control_plane:
        kube_node:
        calico_rr:
    calico_rr:
      hosts: {}
inventory/pi/group_vars/k8s_cluster/k8s-cluster.yml:
kube_proxy_strict_arp: true # MetalLB setting
kubernetes_audit: true
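Once the cluster is up, one way to confirm that strict ARP landed in the rendered kube-proxy config (assumes working kubectl access):
kubectl -n kube-system get configmap kube-proxy -o yaml | grep strictARP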
install kubectl
sudo snap install kubectl --classic
kubectl get nodes -o wide
configure local kube/config
This is done on localhost, i.e. the physical machine you work from (in my case a mac). kubectl is assumed to be installed already.
vi ~/.kube/config # copy `/root/.kube/config` from inside the vm; if unsure, see [multipass](/deptno/deptno.github.io/wiki/multipass) mount
# replace the ip in clusters.cluster[].server with the control-plane ip
# 2022-11-27: the book additionally edits the values below; I think this is for telling clusters apart locally when working with several of them.
# - clusters.cluster[].name
# - clusters.contexts[].context[].cluster
# - clusters.contexts[].context[].user
# - clusters.contexts[].name
# - current-context
# - users[].name
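Instead of hand-editing, the server address can also be swapped with kubectl config; a sketch assuming kubespray's default cluster name cluster.local and an example control-plane ip:
kubectl config set-cluster cluster.local --server=https://192.168.0.74:6443
kubectl get nodes # should now reach the api server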
$ kubectl get nodes
NAME STATUS ROLES AGE VERSION
kube00 Ready control-plane 31m v1.25.4
kube01 Ready control-plane 30m v1.25.4
kube02 Ready control-plane 30m v1.25.4
error
The connection to the server localhost:8080 was refused - did you specify the right host or port?
This presumably happened because the ansible-playbook command was run with --become-user=root, so ~/.kube/config was created for root rather than the login user.
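One way out, assuming the kubeconfig exists at kubeadm's default path on the control-plane node:
sudo cp /etc/kubernetes/admin.conf ~/.kube/config
sudo chown "$(id -u):$(id -g)" ~/.kube/config
kubectl get nodes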
fatal: [kube02]: FAILED! => {"changed": false, "msg": "modprobe: FATAL: Module nf_conntrack_ipv4 not found in directory /lib/modules/5.15.0-53-generic\n", "name": "nf_conntrack_ipv4", "params": "", "rc": 1, "state": "present", "stderr": "modprobe: FATAL: Module nf_conntrack_ipv4 not found in directory /lib/modules/5.15.0-53-generic\n", "stderr_lines": ["modprobe: FATAL: Module nf_conntrack_ipv4 not found in directory /lib/modules/5.15.0-53-generic"], "stdout": "", "stdout_lines": []}
...ignoring
Sunday 27 November 2022 17:37:38 +0900 (0:00:00.273) 0:11:17.710 *******
TASK [kubernetes/node : Persist ip_vs modules] **************************************************************************************************************
changed: [kube00] => {"changed": true, "checksum": "963dae5d0f149158bd6b9a750827856f6f2382fd", "dest": "/etc/modules-load.d/kube_proxy-ipvs.conf", "gid": 0, "group": "root", "md5sum": "0f7f7753a47d8c043fb1f8043b65beb4", "mode": "0644"
This error showed up while running the ansible-playbook command, but ignoring it caused no problems.
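Likely harmless because nf_conntrack_ipv4 was merged into nf_conntrack around kernel 4.19, so the old module name no longer exists on a 5.15 kernel; a quick check on a node:
modinfo nf_conntrack | head -n 3 # the merged module is present
modprobe -n -v nf_conntrack_ipv4 || true # dry run; expected to fail on 5.x kernels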
fatal: [kube03]: FAILED! => {"msg": "Missing sudo password"}
ubuntu@kube00:~/kubespray$ ansible-playbook -i inventory/mycluster/hosts-new02.yml -b facts.yml
[WARNING]: Skipping callback plugin 'ara_default', unable to load
PLAY [Gather facts] *************************************************************
Thursday 22 December 2022 13:40:19 +0900 (0:00:00.012) 0:00:00.012 *****
TASK [Gather minimal facts] *****************************************************
ok: [kube00]
ok: [kube02]
ok: [kube01]
fatal: [kube03]: FAILED! => {"msg": "Missing sudo password"}
The order of the entries printed by the `sudo -l` command is significant; later entries override earlier ones, so sudo can behave differently than expected.
- https://github.com/ansible/ansible/issues/71939#issuecomment-702185274 - the same situation as in this comment occurred; moving the user's privilege line to the very bottom resolved it, and kubespray ran correctly.
# User privilege specification
root ALL=(ALL:ALL) ALL
# ! problem area
# ubuntu ALL=(ALL) NOPASSWD: ALL
# Members of the admin group may gain root privileges
%admin ALL=(ALL) ALL
# Allow members of group sudo to execute any command
%sudo ALL=(ALL:ALL) ALL
# See sudoers(5) for more information on "@include" directives:
# +update: 2022-12-29 ! area resolved by moving the line here
ubuntu ALL=(ALL) NOPASSWD: ALL
# -update: 2022-12-29
@includedir /etc/sudoers.d
# ! area resolved by moving the line here
ubuntu ALL=(ALL) NOPASSWD: ALL
problem:
ubuntu@kube03:~$ sudo -l
Matching Defaults entries for ubuntu on kube03:
env_reset, mail_badpass, secure_path=/usr/local/sbin\:/usr/local/bin\:/usr/sbin\:/usr/bin\:/sbin\:/bin\:/snap/bin, use_pty
User ubuntu may run the following commands on kube03:
(ALL) NOPASSWD: ALL
(ALL : ALL) ALL
ubuntu@kube03:~$ !v
vi /etc/sudoers
ubuntu@kube03:~$ sudo vi /etc/sudoers
resolved:
ubuntu@kube03:~$ sudo -l
Matching Defaults entries for ubuntu on kube03:
env_reset, mail_badpass, secure_path=/usr/local/sbin\:/usr/local/bin\:/usr/sbin\:/usr/bin\:/sbin\:/bin\:/snap/bin, use_pty
User ubuntu may run the following commands on kube03:
(ALL : ALL) ALL
(ALL) NOPASSWD: ALL
ubuntu@kube03:~$
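After fixing sudoers, become access can be re-checked with an ad-hoc ping before re-running the whole playbook (inventory path as used above):
ansible -i inventory/mycluster/hosts-new02.yml kube03 -b -m ping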
fatal: [kube03]: FAILED! => {"changed": false, "elapsed": 300, "msg": "Timeout when waiting for file /etc/cni/net.d/calico-kubeconfig"}
fatal: [pi0]: FAILED! => {"changed": false, "msg": "modprobe: FATAL: Module dummy not found in directory /lib/modules/5.15.0-1012-raspi\n", "name": "dummy", "params": "numdummies=0", "rc": 1, "state": "present", "stderr": "modprobe: FATAL: Module dummy not found in directory /lib/modules/5.15.0-1012-raspi\n", "stderr_lines": ["modprobe: FATAL: Module dummy not found in directory /lib/modules/5.15.0-1012-raspi"], "stdout": "", "stdout_lines": []}
- no solution found; downgraded from ubuntu 22.04 to ubuntu 20.04
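A way to check whether the running kernel ships the module at all before digging further:
find /lib/modules/"$(uname -r)" -name 'dummy.ko*' # empty output means the raspi kernel omits it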
Failed to connect to the host via ssh: ssh: Could not resolve hostname pi2: Temporary failure in name resolution"
name resolution via /etc/hosts or the dhcp server appears to be failing
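A quick resolution check on the machine running ansible (node names assumed from the inventory):
for h in pi0 pi1 pi2; do getent hosts "$h" || echo "$h: not resolvable"; done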
the output has been hidden due to the fact that 'no_log: true' was specified for this result"
seems to occur when the install spans nodes with different architectures, e.g. a control-plane (arm) + worker node (x86) combination
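One way to confirm the suspicion is to ask every node for its architecture with an ad-hoc command (inventory path as used above):
ansible -i inventory/pi/hosts.yml all -m command -a 'uname -m'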
TASK [etcd : Configure | Wait for etcd cluster to be healthy] ***********************************************************************************************************************************************************************
task path: /Users/deptno/workspace/src/github.com/kubespray/roles/etcd/tasks/configure.yml:82
fatal: [pi0]: FAILED! => {
"attempts": 4,
"changed": false,
"cmd": "set -o pipefail && /usr/local/bin/etcdctl endpoint --cluster status && /usr/local/bin/etcdctl endpoint --cluster health 2>&1 | grep -v 'Error: unhealthy cluster' >/dev/null",
"delta": "0:00:05.194729",
"end": "2022-12-30 11:54:44.046275",
"invocation": {
"module_args": {
"_raw_params": "set -o pipefail && /usr/local/bin/etcdctl endpoint --cluster status && /usr/local/bin/etcdctl endpoint --cluster health 2>&1 | grep -v 'Error: unhealthy cluster' >/dev/null",
"_uses_shell": true,
"argv": null,
"chdir": null,
"creates": null,
"executable": "/bin/bash",
"removes": null,
"stdin": null,
"stdin_add_newline": true,
"strip_empty_ends": true,
"warn": false
}
},
"msg": "non-zero return code",
"rc": 1,
"start": "2022-12-30 11:54:38.851546",
"stderr": "{\"level\":\"warn\",\"ts\":\"2022-12-30T11:54:43.954+0900\",\"logger\":\"etcd-client\",\"caller\":\"[email protected]/retry_interceptor.go:62\",\"msg\":\"retrying of unary invoker failed\",\"target\":\"etcd-endpoints://0x40000d4c40/192.168.0.74:2379\",\"attempt\":0,\"error\":\"rpc error: code = DeadlineExceeded desc = context deadline exceeded\"}\nFailed to get the status of endpoint https://192.168.0.78:2379 (context deadline exceeded)",
"stderr_lines": [
"{\"level\":\"warn\",\"ts\":\"2022-12-30T11:54:43.954+0900\",\"logger\":\"etcd-client\",\"caller\":\"[email protected]/retry_interceptor.go:62\",\"msg\":\"retrying of unary invoker failed\",\"target\":\"etcd-endpoints://0x40000d4c40/192.168.0.74:2379\",\"attempt\":0,\"error\":\"rpc error: code = DeadlineExceeded desc = context deadline exceeded\"}",
"Failed to get the status of endpoint https://192.168.0.78:2379 (context deadline exceeded)"
],
"stdout": "https://192.168.0.77:2379, afd0f05f33356a3a, 3.5.6, 20 kB, false, false, 9, 30, 30, \nhttps://192.168.0.74:2379, f0a124daf3245703, 3.5.6, 20 kB, true, false, 9, 30, 30, ",
"stdout_lines": [
"https://192.168.0.77:2379, afd0f05f33356a3a, 3.5.6, 20 kB, false, false, 9, 30, 30, ",
"https://192.168.0.74:2379, f0a124daf3245703, 3.5.6, 20 kB, true, false, 9, 30, 30, "
]
}
- `sudo service ufw stop` - disabled the firewall
- `sudo vi /etc/hosts` - after the host entries were specified the error message changed -> I believe a restart followed
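etcd health can also be re-checked by hand on a control-plane node; a sketch where the endpoint ip comes from the log above and the certificate paths follow kubespray's usual layout (an assumption):
sudo ETCDCTL_API=3 /usr/local/bin/etcdctl \
  --endpoints=https://192.168.0.74:2379 \
  --cacert=/etc/ssl/etcd/ssl/ca.pem \
  --cert=/etc/ssl/etcd/ssl/admin-pi0.pem \
  --key=/etc/ssl/etcd/ssl/admin-pi0-key.pem \
  endpoint health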
fatal: [pi0]: FAILED! => {
"attempts": 5,
"changed": false,
"cmd": [
"/usr/local/bin/kubeadm",
"--kubeconfig",
"/etc/kubernetes/admin.conf",
"token",
"create"
],
"delta": "0:01:15.145403",
"end": "2022-12-30 12:25:47.954213",
"invocation": {
"module_args": {
"_raw_hello_exchange: master version 4\r\ndebug3: mux_client_forwards: request forwardings: 0 local, 0 remote\r\ndebug3: mux_client_request_session: entering\r\ndebug3: mux_client_request_alive: entering\r\ndebug3: mux_client_request_alive: done pid = 82745\r\ndebug3: mux_client_request_session: session request sent\r\ndebug1: mux_client_request_session: master session id: 4\r\ndebug3: mux_client_read_packet: read header failed: Broken pipe\r\ndebug2: Received exit status from master 1\r\n")
- set the control-plane ips with `declare IPS` -> failed
- `ansible-playbook [....] --private-key=~/.ssh/[private key]`
- not resolved