---
# rpool is unencrypted and serves as the boot medium
# the storage location for the VMs is encrypted
# zfs create -o encryption=aes-256-gcm -o keyformat=passphrase rpool/vm
# unlock after boot with: sudo zpool import -d /dev/disk/by-id/ -a && sudo zfs mount -a -l

## hdd_data
### sudo zpool create -o ashift=12 -o feature@encryption=enabled -O encryption=on -O keylocation=prompt -O keyformat=passphrase hdd_data mirror /dev/disk/by-id/ata-TOSHIBA_MG09ACA18TE_Z1B0A27KFJDH /dev/disk/by-id/ata-ST18000NM003D-3DL103_ZVTBSAYS

## hdd_data "new"
### sudo zpool create -o ashift=12 -o feature@encryption=enabled -O encryption=on -O keylocation=prompt -O keyformat=passphrase hdd_data /dev/disk/by-id/ata-ST18000NM003D-3DL103_ZVTBSAYS

# mgrote.zfs_manage_datasets
### mgrote_zfs_extra
# variables for mgrote.zfs_health/trim/scrub/zed/arc_mem/ are grouped under zfs_extra_*
zfs_datasets:  # data pools are not managed here
  # rpool - system datasets
  - dataset: rpool
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on  # noqa yaml[truthy]
    snapdir: hidden
    reservation: 1G
    refreservation: 10G
    acltype: posix
  - dataset: rpool/ROOT
    state: present
    refreservation: 10G
  - dataset: rpool/ROOT/pve-1
    state: present
    refreservation: 10G
    # https://docs.ansible.com/ansible-core/2.14/playbook_guide/playbooks_privilege_escalation.html#risks-of-becoming-an-unprivileged-user
    # otherwise the dotfiles role cannot run setfacl
    acltype: posix
  # rpool - data
  - dataset: rpool/data
    state: present
  # rpool - VMs
  - dataset: rpool/vm
    state: present
  - dataset: rpool/vm/zvol
    state: present
  - dataset: rpool/vm/lxc
    state: present
  # hdd_data
  - dataset: hdd_data
    state: present
    compression: zstd
    sync: disabled
    xattr: sa
    dnodesize: auto
    atime: on  # noqa yaml[truthy]
    snapdir: hidden
    reservation: 1G
    # https://docs.ansible.com/ansible-core/2.14/playbook_guide/playbooks_privilege_escalation.html#risks-of-becoming-an-unprivileged-user
    # otherwise the dotfiles role cannot run setfacl
    acltype: posix
  - dataset: hdd_data/papa_backup
    state: present
  - dataset: hdd_data/pve_backup
    state: present
    recordsize: 1M
  - dataset: hdd_data/videos
    state: present
    recordsize: 1M
  - dataset: hdd_data/music
    state: present
    recordsize: 1M
  - dataset: hdd_data/tmp
    state: present
  - dataset: hdd_data/archiv
    state: present
  - dataset: hdd_data/bilder
    state: present
    recordsize: 1M
  - dataset: hdd_data/scans
    state: present
  - dataset: hdd_data/restic
    state: present
  - dataset: hdd_data/backup
    state: present
  - dataset: hdd_data/buecher
    state: present
  - dataset: hdd_data/programme
    state: present
  - dataset: hdd_data/vm
    state: present
zfs_extra_arc_max_size: "8589934592"  # 8 GB in bytes
zfs_extra_zfs_pools:
  - name: "rpool"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"  # on the first day of each quarter
  - name: "hdd_data"
    systemd_timer_schedule: "*-01,04,07,10-01 23:00"
### mgrote_zfs_sanoid
sanoid_snaps_enable: true
## enable sending snaps
sanoid_syncoid_source_host: false  # can be removed
sanoid_syncoid_ssh_pubkey: "{{ lookup('viczem.keepass.keepass', 'sanoid_syncoid_public_key', 'notes') }}"  # can be removed
sanoid_datasets:
  ### hdd_data
  - path: 'hdd_data/videos'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/music'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/papa_backup'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/tmp'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/pve_backup'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: 'hdd_data/archiv'
    template: '14tage'
    recursive: 'yes'
    snapshots: true
  - path: hdd_data/bilder
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  - path: hdd_data/scans
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  - path: hdd_data/backup
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '31tage'
  - path: hdd_data/restic
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  - path: hdd_data/programme
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  - path: hdd_data/buecher
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  - path: hdd_data/vm
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: '14tage'
  ### rpool
  - path: rpool
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: 'pve14tage'
  - path: rpool/ROOT
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: 'pve14tage'
  - path: rpool/ROOT/pve-1
    recursive: 'no'  # noqa yaml[truthy]
    snapshots: true
    template: 'pve14tage'
### mgrote_proxmox_bind_mounts
pve_bind_mounts:
  ### fileserver3
  - vmid: 107
    mp_nr: 0
    mp_path_host: /hdd_data/videos
    mp_path_guest: /shares_videos
  - vmid: 107
    mp_nr: 2
    mp_path_host: /hdd_data/pve_backup
    mp_path_guest: /shares_pve_backup
  - vmid: 107
    mp_nr: 3
    mp_path_host: /hdd_data/papa_backup
    mp_path_guest: /shares_papa_backup
  - vmid: 107
    mp_nr: 4
    mp_path_host: /hdd_data/music
    mp_path_guest: /shares_music
  - vmid: 107
    mp_nr: 5
    mp_path_host: /hdd_data/tmp
    mp_path_guest: /shares_tmp
  - vmid: 107
    mp_nr: 6
    mp_path_host: /hdd_data/archiv
    mp_path_guest: /shares_archiv
  - vmid: 107
    mp_nr: 7
    mp_path_host: /hdd_data/bilder
    mp_path_guest: /shares_bilder
  - vmid: 107
    mp_nr: 9
    mp_path_host: /hdd_data/scans
    mp_path_guest: /shares_scans
  - vmid: 107
    mp_nr: 10
    mp_path_host: /hdd_data/restic
    mp_path_guest: /shares_restic
  - vmid: 107
    mp_nr: 12
    mp_path_host: /hdd_data/backup
    mp_path_guest: /shares_backup
  - vmid: 107
    mp_nr: 14
    mp_path_host: /hdd_data/buecher
    mp_path_guest: /shares_buecher
  - vmid: 107
    mp_nr: 15
    mp_path_host: /hdd_data/programme
    mp_path_guest: /shares_programme
  - vmid: 107
    mp_nr: 16
    mp_path_host: /hdd_data/vm
    mp_path_guest: /shares_vm
# mgrote.pbs_pve_integration
pve_pbs_datastore:
  - name: pbs
    server: pbs.mgrote.net
    datastore: zfs_backup
    username: user_pve5@pbs
    password: "{{ lookup('viczem.keepass.keepass', 'pbs_pve_user', 'password') }}"
    fingerprint: "7F:AC:54:75:1C:33:55:84:1E:1E:3A:15:5A:5E:AF:79:33:C9:D4:E1:C0:A0:1C:0D:9E:6A:EA:82:F9:27:57:79"