---
# rpool ist unverschlüsselt als Boot-Medium
# der Speicherort für die VMs ist verschlüsselt
# zfs create -o encryption=aes-256-gcm -o keyformat=passphrase rpool/vm
# entschlüsseln nach Boot mit: sudo zpool import -d /dev/disk/by-id/ -a && sudo zfs mount -a -l

## hdd_data
### sudo zpool create -o ashift=12 -o feature@encryption=enabled -O encryption=on -O keylocation=prompt -O keyformat=passphrase hdd_data mirror /dev/disk/by-id/ata-TOSHIBA_MG09ACA18TE_Z1B0A27KFJDH /dev/disk/by-id/ata-ST18000NM003D-3DL103_ZVTBSAYS

## hdd_data "neu"
### sudo zpool create -o ashift=12 -o feature@encryption=enabled -O encryption=on -O keylocation=prompt -O keyformat=passphrase hdd_data /dev/disk/by-id/ata-ST18000NM003D-3DL103_ZVTBSAYS

# mgrote.zfs_manage_datasets

### mgrote_zfs_extra

# Variablen für mgrote.zfs_health/trim/scrub/zed/arc_mem/ sind zusammengefasst unter zfs_extra_*
zfs_datasets:  # data pools themselves are NOT managed here, only their datasets
  # rpool - system datasets
  - dataset: rpool
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on  # noqa yaml[truthy]
    snapdir: hidden
    reservation: 1G
    refreservation: 10G
    acltype: posix
  - dataset: rpool/ROOT
    state: present
    refreservation: 10G
  - dataset: rpool/ROOT/pve-1
    state: present
    refreservation: 10G
    # posix ACLs so the dotfiles role can run setfacl; see
    # https://docs.ansible.com/ansible-core/2.14/playbook_guide/playbooks_privilege_escalation.html#risks-of-becoming-an-unprivileged-user
    acltype: posix
  # rpool - data
  # (duplicate "rpool/data" entry removed - it was listed twice with identical settings)
  - dataset: rpool/data
    state: present
  # rpool - VMs
  - dataset: rpool/vm
    state: present
  - dataset: rpool/vm/zvol
    state: present
  - dataset: rpool/vm/lxc
    state: present
  # hdd_data
  - dataset: hdd_data
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on  # noqa yaml[truthy]
    snapdir: hidden
    reservation: 1G
    # posix ACLs so the dotfiles role can run setfacl; see
    # https://docs.ansible.com/ansible-core/2.14/playbook_guide/playbooks_privilege_escalation.html#risks-of-becoming-an-unprivileged-user
    acltype: posix
  - dataset: hdd_data/papa_backup
    state: present
  - dataset: hdd_data/pve_backup
    state: present
    recordsize: 1M
  - dataset: hdd_data/videos
    state: present
    recordsize: 1M
  - dataset: hdd_data/music
    state: present
    recordsize: 1M
  - dataset: hdd_data/tmp
    state: present
  - dataset: hdd_data/archiv
    state: present
  - dataset: hdd_data/bilder
    state: present
    recordsize: 1M
  - dataset: hdd_data/scans
    state: present
  - dataset: hdd_data/restic
    state: present
  - dataset: hdd_data/backup
    state: present
  - dataset: hdd_data/buecher
    state: present
  - dataset: hdd_data/programme
    state: present
  - dataset: hdd_data/vm
    state: present

zfs_extra_arc_max_size: "8589934592"  # 8 GB in bytes
zfs_extra_zfs_pools:
  - name: "rpool"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"  # first day of every quarter
  - name: "hdd_data"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"

### mgrote_zfs_sanoid
sanoid_snaps_enable: true
## enable sending snaps
sanoid_syncoid_source_host: true
sanoid_syncoid_ssh_pubkey: "{{ lookup('keepass', 'sanoid_syncoid_public_key', 'notes') }}"
# 'recursive' values are deliberately quoted strings ('yes'/'no') because
# sanoid expects the literal words, not YAML booleans.
sanoid_datasets:
  ### hdd_data
  - path: 'hdd_data/videos'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/music'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/papa_backup'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/tmp'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/pve_backup'
    template: '3tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/archiv'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/bilder'
    template: '14tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/scans'
    template: '3tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/backup'
    template: '31tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/restic'
    template: '3tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/programme'
    template: '14tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/buecher'
    template: '14tage'
    recursive: 'no'
    snapshots: true
  - path: 'hdd_data/vm'
    template: '3tage'
    recursive: 'no'
    snapshots: true
  ### rpool
  - path: 'rpool'
    template: 'pve3tage'
    recursive: 'no'
    snapshots: true
  - path: 'rpool/ROOT'
    template: 'pve3tage'
    recursive: 'no'
    snapshots: true
  - path: 'rpool/ROOT/pve-1'
    template: 'pve3tage'
    recursive: 'no'
    snapshots: true

### mgrote_cv4pve-autosnap
cv4pve_api_user: root@pam!cv4pve-autosnap
cv4pve_api_token: "{{ lookup('keepass', 'cv4pve_api_token', 'password') }}"
cv4pve_vmid: all,-115  # all guests except VMID 115 (fileserver3, uses bind mounts)
cv4pve_keep_snapshots: 5
cv4pve_dl_link: "https://github.com/Corsinvest/cv4pve-autosnap/releases/download/v1.14.7/cv4pve-autosnap-linux-x64.zip"

### mgrote_proxmox_bind_mounts
pve_bind_mounts:
  ### fileserver3
  - vmid: 115
    mp_nr: 0
    mp_path_host: /hdd_data/videos
    mp_path_guest: /shares_videos
  - vmid: 115
    mp_nr: 2
    mp_path_host: /hdd_data/pve_backup
    mp_path_guest: /shares_pve_backup
  - vmid: 115
    mp_nr: 3
    mp_path_host: /hdd_data/papa_backup
    mp_path_guest: /shares_papa_backup
  - vmid: 115
    mp_nr: 4
    mp_path_host: /hdd_data/music
    mp_path_guest: /shares_music
  - vmid: 115
    mp_nr: 5
    mp_path_host: /hdd_data/tmp
    mp_path_guest: /shares_tmp
  - vmid: 115
    mp_nr: 6
    mp_path_host: /hdd_data/archiv
    mp_path_guest: /shares_archiv
  - vmid: 115
    mp_nr: 7
    mp_path_host: /hdd_data/bilder
    mp_path_guest: /shares_bilder
  - vmid: 115
    mp_nr: 9
    mp_path_host: /hdd_data/scans
    mp_path_guest: /shares_scans
  - vmid: 115
    mp_nr: 10
    mp_path_host: /hdd_data/restic
    mp_path_guest: /shares_restic
  - vmid: 115
    mp_nr: 12
    mp_path_host: /hdd_data/backup
    mp_path_guest: /shares_backup
  - vmid: 115
    mp_nr: 14
    mp_path_host: /hdd_data/buecher
    mp_path_guest: /shares_buecher
  - vmid: 115
    mp_nr: 15
    mp_path_host: /hdd_data/programme
    mp_path_guest: /shares_programme
  - vmid: 115
    mp_nr: 16
    mp_path_host: /hdd_data/vm
    mp_path_guest: /shares_vm

# mgrote.pbs_pve_integration
pve_pbs_datastore:
  - name: pbs
    server: pbs.mgrote.net
    datastore: zfs_backup
    username: user_pve5@pbs
    password: "{{ lookup('keepass', 'pbs_pve_user', 'password') }}"
    fingerprint: "7F:AC:54:75:1C:33:55:84:1E:1E:3A:15:5A:5E:AF:79:33:C9:D4:E1:C0:A0:1C:0D:9E:6A:EA:82:F9:27:57:79"