homeserver/host_vars/pve2.grote.lan.yml

---
### mgrote.zfs_manage_datasets
# zfs create -o encryption=aes-256-gcm -o keyformat=passphrase hdd_data_raidz/data_crypt
# zfs create hdd_data_raidz/videos
# Pools must be created beforehand!
# rpool
# zpool create -f -o ashift=12 rpool mirror /dev/disk/by-id/ata-SAMSUNG_MZ7LH960HAJR-00005_S45NNC0R105094 /dev/disk/by-id/ata-SAMSUNG_MZ7LH960HAJR-00005_S45NNC0R105095
# HDD_DATA_RAIDZ
# zpool create -f -o ashift=12 hdd_data_raidz raidz /dev/disk/by-id/ata-WDC_WD80EZAZ-11TDBA0_2SG991TJ /dev/disk/by-id/ata-WDC_WD80EZAZ-11TDBA0_2SGA23EJ /dev/disk/by-id/ata-ST8000DM004-2CX188_ZCT1AK0F
zfs_datasets: # the data pools themselves are not managed here
- dataset: rpool/vm
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/zvol
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/qcow
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/papa_backup
state: present
compression: zstd
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/pve_backup
state: present
compression: zstd
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/videos
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/music
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/data_crypt
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/tmp
state: present
compression: zstd
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: hdd_data_raidz/archiv
state: present
compression: zstd
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/data
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/papa
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/proxmox
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/videos
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/music
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/tmp
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
- dataset: rpool/vm/dir/vm-158/archiv
state: present
compression: lz4
sync: disabled
xattr: sa
dnodesize: auto
atime: on
snapdir: hidden
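# For reference only: a minimal sketch of what each entry above amounts to on the CLI
# (assumption, the role itself applies this via its own tasks):
#   zfs create -p rpool/vm
#   zfs set compression=lz4 sync=disabled xattr=sa dnodesize=auto atime=on snapdir=hidden rpool/vm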
### mgrote.apcupsd
apcupsd_nis_master: true
apcupsd_nis_master_hostname: pve2.grote.lan
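# The munin apc_nis plugin further down polls this NIS master. A sketch of the relevant
# /etc/apcupsd/apcupsd.conf directives on the master (assumption about what the role renders;
# the values shown are the apcupsd defaults):
#   NETSERVER on
#   NISIP 0.0.0.0
#   NISPORT 3551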
### mgrote.zfs_extra # variables for mgrote.zfs_health/trim/scrub/zed/arc_mem are grouped under zfs_extra_*
zfs_extra_arc_max_size: "17179869184" # 16 GiB in bytes (16 * 1024^3)
zfs_extra_max_usage_health: "90"
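# The ARC limit is normally a ZFS kernel module parameter; a manual equivalent would be
# (sketch, assuming the usual modprobe.d mechanism rather than the role's actual template):
#   echo "options zfs zfs_arc_max=17179869184" > /etc/modprobe.d/zfs.conf
#   echo 17179869184 > /sys/module/zfs/parameters/zfs_arc_max   # apply immediately, no reboot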
zfs_extra_zfs_pools:
- name: "rpool"
cron_minutes_zfs_scrub: "0"
cron_hour_zfs_scrub: "23"
cron_day_of_month_zfs_scrub: "14"
cron_day_of_week_zfs_scrub: "*"
cron_month_zfs_scrub: "*/2"
- name: "hdd_data_raidz"
cron_minutes_zfs_scrub: "0"
cron_hour_zfs_scrub: "23"
cron_day_of_month_zfs_scrub: "14"
cron_day_of_week_zfs_scrub: "*"
cron_month_zfs_scrub: "*/2"
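# With the values above, each pool is scrubbed at 23:00 on the 14th of every second month;
# the resulting cron entries would look roughly like this (sketch; whether this lands in
# /etc/cron.d or a user crontab depends on the role):
#   0 23 14 */2 * root zpool scrub rpool
#   0 23 14 */2 * root zpool scrub hdd_data_raidz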
### mgrote.zfs_sanoid
sanoid_datasets:
- path: 'hdd_data_raidz/data_crypt'
template: '31tage'
recursive: 'no'
snapshots: true
- path: 'hdd_data_raidz/videos'
template: '14tage'
recursive: 'yes'
snapshots: true
- path: 'hdd_data_raidz/music'
template: '14tage'
recursive: 'yes'
snapshots: true
- path: 'hdd_data_raidz/papa_backup'
template: '14tage'
recursive: 'yes'
snapshots: true
- path: 'hdd_data_raidz/tmp'
template: '3tage'
recursive: 'yes'
snapshots: true
- path: 'hdd_data_raidz/pve_backup'
    snapshots: false # disables sanoid for this dataset
- path: 'hdd_data_raidz/archiv'
template: '3monate'
recursive: 'yes'
snapshots: true
sanoid_templates:
- name: '31tage'
    keep_hourly: '24' # hourly snapshots to keep
    keep_daily: '31' # daily snapshots to keep
    keep_monthly: '3' # monthly snapshots to keep
    keep_yearly: '0' # yearly snapshots to keep
    frequently: '16' # frequent (sub-hourly) snapshots to keep
    frequent_period: '15' # interval between frequent snapshots (every 15 minutes)
    autosnap: 'yes' # create snapshots automatically
autoprune: 'yes'
- name: '14tage'
keep_hourly: '0'
keep_daily: '14'
keep_monthly: '0'
keep_yearly: '0'
frequently: '0'
frequent_period: '0'
autosnap: 'yes'
autoprune: 'yes'
- name: '3tage'
keep_hourly: '0'
keep_daily: '3'
keep_monthly: '0'
keep_yearly: '0'
frequently: '0'
frequent_period: '0'
autosnap: 'yes'
autoprune: 'yes'
- name: '3monate'
keep_hourly: '0'
keep_daily: '3'
keep_monthly: '3'
keep_yearly: '0'
frequently: '0'
frequent_period: '0'
autosnap: 'yes'
autoprune: 'yes'
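# Together these variables map onto sanoid.conf stanzas roughly like the following
# (sketch of the generated file, assuming the standard sanoid.conf syntax):
#   [hdd_data_raidz/data_crypt]
#     use_template = 31tage
#     recursive = no
#   [template_31tage]
#     hourly = 24
#     daily = 31
#     monthly = 3
#     yearly = 0
#     frequently = 16
#     frequent_period = 15
#     autosnap = yes
#     autoprune = yes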
### mgrote.cv4pve-autosnap
cv4pve_api_user: root@pam!cv4pve-autosnap
cv4pve_api_token: "{{ lookup('keepass', 'cv4pve_api_token', 'password') }}"
cv4pve_vmid: all,-127,-158,-112,-100
cv4pve_keep_snapshots: 1
cv4pve_dl_link: "https://github.com/Corsinvest/cv4pve-autosnap/releases/download/v1.10.0/cv4pve-autosnap-linux-x64.zip"
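# cv4pve_vmid means: snapshot all guests except VMIDs 127, 158, 112 and 100.
# A sketch of the resulting call (assumption; exact flags depend on the installed
# cv4pve-autosnap version and on the role's cron template):
#   cv4pve-autosnap --host=pve2.grote.lan \
#     --api-token='root@pam!cv4pve-autosnap=<token>' \
#     snap --vmid='all,-127,-158,-112,-100' --keep=1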
### geerlingguy.munin-node
munin_node_plugins:
- name: apc_nis
munin_node_config: {
"apc_nis": {
"env.host": "pve2.grote.lan",
"env.port": "3551"
}
}
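# The mapping above is rendered into a munin-node plugin configuration; it is equivalent to
# this plugin-conf stanza (standard munin format, the exact file under /etc/munin/plugin-conf.d/ may vary):
#   [apc_nis]
#   env.host pve2.grote.lan
#   env.port 3551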
munin_node_additional_plugins:
- name: chrony
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/chrony
- name: systemd_status
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/systemd_status
- name: zfs_arcstats
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/zfs_arcstats
- name: zfsonlinux_stats_
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/zfsonlinux_stats_
- name: zpool_iostat
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/zpool_iostat
- name: zfs_list
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/zfs_list
- name: zpool_capacity
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/zpool_capacity
- name: kvm_mem
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/kvm_mem
- name: kvm_net
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/kvm_net
- name: apcupsd_pwr
src: https://git.mgrote.net/mg/munin-plugins/raw/branch/master/apcupsd_pwr
  - name: apc_nis # already present locally
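# The additional plugins are downloaded from the URLs above and enabled; a manual equivalent
# would be roughly (sketch, assuming the usual munin-node layout):
#   wget -O /usr/share/munin/plugins/chrony https://git.mgrote.net/mg/munin-plugins/raw/branch/master/chrony
#   chmod +x /usr/share/munin/plugins/chrony
#   ln -s /usr/share/munin/plugins/chrony /etc/munin/plugins/chrony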
munin_node_disabled_plugins:
  - name: meminfo # causes too much load
  - name: hddtemp2 # replaced by hddtemp_smartctl
- name: squid_cache
- name: squid_objectsize
- name: squid_requests
- name: squid_traffic
- name: nfsd
- name: samba
- name: nfsd4
  - name: ntp # causes too many DNS PTR requests
- name: cronjobs
  - name: hddtempd # replaced by hddtemp_smartctl
  - name: ipmi_power # empty graph on pve2
- name: fail2ban
- name: fail2ban_
- name: apcupsd_pct
- name: kvm_io
- name: kvm_cpu
- name: docker_mem
- name: lvm_