Add playbook and role to reset the cluster
This deletes everything related to the cluster and lets you start again from scratch.
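To wipe a cluster with it, run the playbook against your inventory, roughly like this (the inventory path is only a placeholder, not part of this commit):

    ansible-playbook -i inventory/inventory.cfg reset.yml

Since the role is included with the reset tag, passing --tags reset would also limit a larger play to just these tasks.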
parent 4265149463
commit 00ad151186
2 changed files with 43 additions and 0 deletions
reset.yml (new file, 5 lines)
@@ -0,0 +1,5 @@
---

- hosts: all
  roles:
    - { role: reset, tags: reset }
roles/reset/tasks/main.yml (new file, 38 lines)
@@ -0,0 +1,38 @@
---

- name: reset | stop services
  service: name={{item}} state=stopped
  with_items:
    - kubelet
    - etcd
  failed_when: false

- name: reset | remove services
  file: path="/etc/systemd/system/{{item}}" state=absent
  with_items:
    - kubelet
    - etcd
  register: services_removed

- name: reset | systemctl daemon-reload
  command: systemctl daemon-reload
  when: ansible_service_mgr == "systemd" and services_removed.changed

- name: reset | remove all containers
  shell: docker ps -aq | xargs -r docker rm -fv

- name: reset | gather mounted kubelet dirs
  shell: mount | grep /var/lib/kubelet | awk '{print $3}'
  register: mounted_dirs

- name: reset | unmount kubelet dirs
  command: umount {{item}}
  with_items: '{{mounted_dirs.stdout_lines}}'

- name: reset | delete some files and directories
  file: path={{ item }} state=absent
  with_items:
    - /etc/kubernetes/
    - /var/lib/kubelet
    - /var/lib/etcd