homeserver/host_vars/pve5.grote.lan.yml
---
# rpool is unencrypted and serves as the boot medium
# the storage location for the VMs is encrypted
# zfs create -o encryption=aes-256-gcm -o keyformat=passphrase rpool/vm
# unlock after boot with: sudo zfs mount -a -l
## HDD_DATA_RAIDZ
### sudo zpool create -o ashift=12 -o feature@encryption=enabled -O encryption=on -O keylocation=prompt -O keyformat=passphrase hdd_data_raidz mirror /dev/disk/by-id/ata-TOSHIBA_MG09ACA18TE_Z1B0A27KFJDH /dev/disk/by-id/ata-TOSHIBA_MG09ACA18TE_Z1B0A28LFJDH
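# A quick way to confirm the key is loaded and the encrypted datasets are mounted
# after unlocking (illustrative check, not managed by any role here):
### zfs get -r encryption,keystatus,mounted hdd_data_raidz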
# mgrote.zfs_manage_datasets
### mgrote.zfs_extra
# Variables for mgrote.zfs_health/trim/scrub/zed/arc_mem/ are consolidated under zfs_extra_*
zfs_datasets:  # data pools are not managed here
  # rpool - system datasets
  - dataset: rpool
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on
    snapdir: hidden
    reservation: 1G
    refreservation: 10G
  - dataset: rpool/ROOT
    state: present
    refreservation: 10G
  - dataset: rpool/ROOT/pve-1
    state: present
    refreservation: 10G
  # rpool - VMs
  - dataset: rpool/vm
    state: present
  - dataset: rpool/vm/zvol
    state: present
  - dataset: rpool/vm/lxc
    state: present
  # hdd_data_raidz
  - dataset: hdd_data_raidz
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on
    snapdir: hidden
    reservation: 1G
  - dataset: hdd_data_raidz/papa_backup
    state: present
  - dataset: hdd_data_raidz/pve_backup
    state: present
    recordsize: 1M
  - dataset: hdd_data_raidz/videos
    state: present
    recordsize: 1M
  - dataset: hdd_data_raidz/music
    state: present
    recordsize: 1M
  - dataset: hdd_data_raidz/tmp
    state: present
  - dataset: hdd_data_raidz/archiv
    state: present
  - dataset: hdd_data_raidz/bilder
    state: present
    recordsize: 1M
  - dataset: hdd_data_raidz/hm
    state: present
  - dataset: hdd_data_raidz/scans
    state: present
  - dataset: hdd_data_raidz/restic
    state: present
  - dataset: hdd_data_raidz/backup
    state: present
  - dataset: hdd_data_raidz/buecher
    state: present
  - dataset: hdd_data_raidz/programme
    state: present
  - dataset: hdd_data_raidz/vm
    state: present
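# Properties set above can be spot-checked after a role run, e.g. (illustrative):
### zfs get -o name,property,value,source compression,recordsize,atime hdd_data_raidz/videos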
zfs_extra_arc_max_size: "8589934592" # 8GB in Bytes
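# 8589934592 = 8 * 1024^3; the limit actually in effect can be read back from the
# kernel module parameter (illustrative check):
### cat /sys/module/zfs/parameters/zfs_arc_max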
zfs_extra_zfs_pools:
  - name: "rpool"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"  # the first day of each quarter
  - name: "hdd_data_raidz"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"
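# The OnCalendar expression can be validated before rollout, e.g.:
### systemd-analyze calendar "*-01,04,07,10-01 23:00"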
### mgrote.zfs_sanoid
sanoid_datasets:
  - path: 'hdd_data_raidz/videos'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data_raidz/music'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data_raidz/papa_backup'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data_raidz/tmp'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data_raidz/pve_backup'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data_raidz/archiv'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: hdd_data_raidz/bilder
    recursive: 'no'
    snapshots: true
    template: '14tage'
  - path: hdd_data_raidz/hm
    recursive: 'no'
    snapshots: true
    template: '14tage'
  - path: hdd_data_raidz/scans
    recursive: 'no'
    snapshots: true
    template: '3tage'
  - path: hdd_data_raidz/backup
    recursive: 'no'
    snapshots: true
    template: '31tage'
  - path: hdd_data_raidz/restic
    recursive: 'no'
    snapshots: true
    template: '3tage'
  - path: hdd_data_raidz/programme
    recursive: 'no'
    snapshots: true
    template: '14tage'
  - path: hdd_data_raidz/buecher
    recursive: 'no'
    snapshots: true
    template: '14tage'
  - path: hdd_data_raidz/vm
    recursive: 'no'
    snapshots: true
    template: 'pve3tage'
  - path: rpool
    recursive: 'no'
    snapshots: true
    template: 'pve3tage'
  - path: rpool/ROOT
    recursive: 'no'
    snapshots: true
    template: 'pve3tage'
  - path: rpool/ROOT/pve-1
    recursive: 'no'
    snapshots: true
    template: '3tage'
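# The templates referenced above ('3tage', '14tage', '31tage', 'pve3tage') are assumed
# to be defined centrally for the mgrote.zfs_sanoid role; the resulting snapshots can
# be inspected with (illustrative):
### zfs list -t snapshot -o name,creation hdd_data_raidz/bilder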
### mgrote.cv4pve-autosnap
cv4pve_api_user: root@pam!cv4pve-autosnap
cv4pve_api_token: "{{ lookup('keepass', 'cv4pve_api_token_pve2', 'password') }}"
cv4pve_vmid: all,-127,-112,-116,-101,-102,-106 # re-enable snapshots for k3s* again!
cv4pve_keep_snapshots: 5
cv4pve_dl_link: "https://github.com/Corsinvest/cv4pve-autosnap/releases/download/v1.10.0/cv4pve-autosnap-linux-x64.zip"
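# The API token is presumably created once on the host, roughly like this
# (illustrative; privilege separation disabled so the token inherits root@pam):
### pveum user token add root@pam cv4pve-autosnap --privsep 0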
### mgrote.proxmox_bind_mounts
pve_bind_mounts:
  - vmid: 127
    mp_nr: 0
    mp_path_host: /hdd_data_raidz/videos
    mp_path_guest: /shares_videos
  - vmid: 127
    mp_nr: 2
    mp_path_host: /hdd_data_raidz/pve_backup
    mp_path_guest: /shares_pve_backup
  - vmid: 127
    mp_nr: 3
    mp_path_host: /hdd_data_raidz/papa_backup
    mp_path_guest: /shares_papa_backup
  - vmid: 127
    mp_nr: 4
    mp_path_host: /hdd_data_raidz/music
    mp_path_guest: /shares_music
  - vmid: 127
    mp_nr: 5
    mp_path_host: /hdd_data_raidz/tmp
    mp_path_guest: /shares_tmp
  - vmid: 127
    mp_nr: 6
    mp_path_host: /hdd_data_raidz/archiv
    mp_path_guest: /shares_archiv
  - vmid: 127
    mp_nr: 7
    mp_path_host: /hdd_data_raidz/bilder
    mp_path_guest: /shares_bilder
  - vmid: 127
    mp_nr: 8
    mp_path_host: /hdd_data_raidz/hm
    mp_path_guest: /shares_hm
  - vmid: 127
    mp_nr: 9
    mp_path_host: /hdd_data_raidz/scans
    mp_path_guest: /shares_scans
  - vmid: 127
    mp_nr: 10
    mp_path_host: /hdd_data_raidz/restic
    mp_path_guest: /shares_restic
  - vmid: 127
    mp_nr: 12
    mp_path_host: /hdd_data_raidz/backup
    mp_path_guest: /shares_backup
  - vmid: 127
    mp_nr: 14
    mp_path_host: /hdd_data_raidz/buecher
    mp_path_guest: /shares_buecher
  - vmid: 127
    mp_nr: 15
    mp_path_host: /hdd_data_raidz/programme
    mp_path_guest: /shares_programme
  - vmid: 127
    mp_nr: 16
    mp_path_host: /hdd_data_raidz/vm
    mp_path_guest: /shares_vm
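# mgrote.proxmox_bind_mounts is expected to render these as mount point entries in
# the container config /etc/pve/lxc/127.conf, e.g. (illustrative):
### mp0: /hdd_data_raidz/videos,mp=/shares_videos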
### mgrote.munin-node
munin_node_plugins:
  - name: timesync
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/systemd/timesync_status
  - name: systemd_status
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/systemd/systemd_status
  - name: systemd_mem
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/systemd/systemd_mem
    config: |
      [systemd_mem]
      env.all_services true
  - name: lvm_
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/disk/lvm_
    config: |
      [lvm_*]
      user root
  - name: fail2ban
    src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/extern/fail2ban
    config: |
      [fail2ban]
      env.client /usr/bin/fail2ban-client
      env.config_dir /etc/fail2ban
      user root
  - name: zfs_arcstats
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zfs_arcstats
  - name: zfsonlinux_stats_
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zfsonlinux_stats_
  - name: zpool_iostat
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zpool_iostat
  - name: zfs_list
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zfs_list
    config: |
      [zfs_list]
      env.ignore_datasets_pattern autodaily
  - name: zpool_capacity
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zpool_capacity
  - name: kvm_mem
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/libvirt/kvm_mem
  - name: kvm_net
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/libvirt/kvm_net
  - name: kvm_io
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/libvirt/kvm_io
    config: |
      [kvm_io]
      user root
  - name: kvm_cpu
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/libvirt/kvm_cpu
  - name: proxmox_count
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/proxmox/proxmox_vm_count
    config: |
      [proxmox_count]
      user root
      group root
  - name: zfs_count
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/zfs/zfs_pool_dataset_count
  - name: ksm_
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/system/kernel_same_page_merging
  - name: apcupsd_ww
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/power/apcupsd_ww
  - name: apcupsd_pwr
    src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/extern/apcupsd_pwr
    config: |
      [apcupsd_pwr]
      env.ups_model APC-BX950U-GR
  - name: http_response
    src: https://git.mgrote.net/mg/mirror-munin-contrib/raw/branch/master/plugins/http/http_response
    config: |
      [http_response]
      env.sites https://pve5.grote.lan:8006
      env.max_time 20
      env.short_label true
      env.follow_redirect true
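# Individual plugins can be exercised locally after deployment, e.g. (illustrative):
### munin-run zpool_capacity
### munin-run http_response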