Compare commits

...

47 commits

Author SHA1 Message Date
9f4272be80 navidrome: configure playlists dir
All checks were successful
ansible-lint / gitleaks (push) Successful in 11s
ansible-lint / Ansible Lint (push) Successful in 1m28s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 13:56:41 +01:00
3d50137374 gramps: rename containers
All checks were successful
ansible-lint / gitleaks (push) Successful in 11s
ansible-lint / Ansible Lint (push) Successful in 1m36s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 12:29:05 +00:00
52bc3a35bf navidrome: set new name without "-mg"
All checks were successful
ansible-lint / gitleaks (push) Successful in 9s
ansible-lint / Ansible Lint (push) Successful in 1m51s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 13:17:13 +01:00
4e61ed497d navidrome: set CoverArtPriority
All checks were successful
ansible-lint / gitleaks (push) Successful in 11s
ansible-lint / Ansible Lint (push) Successful in 1m23s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 12:01:23 +01:00
eac08c2c65 navidrome: disable Anonymous Data Collection
All checks were successful
ansible-lint / gitleaks (push) Successful in 9s
ansible-lint / Ansible Lint (push) Successful in 1m16s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 09:19:30 +01:00
af6537ee62 remove duplicate rsync_mirror entry
All checks were successful
ansible-lint / gitleaks (push) Successful in 19s
ansible-lint / Ansible Lint (push) Successful in 2m19s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-12-23 09:13:04 +01:00
8d38bb1ce5 chore(deps): update deluan/navidrome docker tag to v0.54.2
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 19s
ansible-lint / gitleaks (push) Successful in 34s
ansible-lint / Ansible Lint (pull_request) Successful in 3m45s
ansible-lint / Ansible Lint (push) Successful in 3m35s
2024-12-22 00:12:27 +00:00
67b621adbc chore(deps): update deluan/navidrome docker tag to v0.54.1
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 8s
ansible-lint / gitleaks (push) Successful in 8s
ansible-lint / Ansible Lint (pull_request) Successful in 1m23s
ansible-lint / Ansible Lint (push) Successful in 1m22s
2024-12-21 08:07:27 +00:00
17af72094b chore(deps): update deluan/navidrome docker tag to v0.54.0
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 11s
ansible-lint / gitleaks (push) Successful in 10s
ansible-lint / Ansible Lint (pull_request) Successful in 1m38s
ansible-lint / Ansible Lint (push) Successful in 1m33s
2024-12-21 04:07:27 +00:00
8325123ff4 chore(deps): update ghcr.io/miniflux/miniflux docker tag to v2.2.4
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 8s
ansible-lint / gitleaks (push) Successful in 8s
ansible-lint / Ansible Lint (pull_request) Successful in 1m24s
ansible-lint / Ansible Lint (push) Successful in 1m23s
2024-12-21 00:07:18 +00:00
3f156b0dcf chore(deps): update zricethezav/gitleaks docker tag to v8.22.0
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 25s
ansible-lint / gitleaks (push) Successful in 9s
ansible-lint / Ansible Lint (pull_request) Successful in 1m27s
ansible-lint / Ansible Lint (push) Successful in 1m26s
2024-12-20 16:29:38 +00:00
c8888a8aaa chore(deps): update zricethezav/gitleaks docker tag to v8.21.4
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 9s
ansible-lint / gitleaks (push) Successful in 9s
ansible-lint / Ansible Lint (pull_request) Successful in 1m38s
ansible-lint / Ansible Lint (push) Successful in 1m28s
2024-12-20 16:09:57 +00:00
f6d5330dbc chore(deps): update dependency corsinvest/cv4pve-autosnap to v1.15.0
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 10s
ansible-lint / gitleaks (push) Successful in 8s
ansible-lint / Ansible Lint (pull_request) Successful in 1m30s
ansible-lint / Ansible Lint (push) Successful in 1m27s
2024-12-19 20:07:18 +00:00
279cba998c chore(deps): update traefik docker tag to v3.2.3
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 12s
ansible-lint / gitleaks (push) Successful in 9s
ansible-lint / Ansible Lint (push) Successful in 2m28s
ansible-lint / Ansible Lint (pull_request) Successful in 2m39s
2024-12-16 20:07:13 +00:00
mg
139f8dc65f revert 9296d195ff
All checks were successful
ansible-lint / gitleaks (push) Successful in 2m51s
ansible-lint / Ansible Lint (push) Successful in 3m46s
revert Revert "chore(deps): update nextcloud docker tag to v30.0.4"

This reverts commit 080f5b3dee.
2024-12-15 20:55:21 +01:00
mg
9724851ec8 revert 6b7f1dfd37
Some checks failed
ansible-lint / Ansible Lint (push) Has been cancelled
ansible-lint / gitleaks (push) Has been cancelled
revert Revert "chore(deps): update traefik docker tag to v3.2.2"

This reverts commit 70b8983cf7.
2024-12-15 20:55:00 +01:00
6b7f1dfd37 Revert "chore(deps): update traefik docker tag to v3.2.2"
Some checks failed
ansible-lint / Ansible Lint (push) Has been cancelled
ansible-lint / gitleaks (push) Has been cancelled
This reverts commit 70b8983cf7.
2024-12-15 19:47:51 +00:00
9296d195ff Revert "chore(deps): update nextcloud docker tag to v30.0.4"
This reverts commit 080f5b3dee.
2024-12-15 19:47:44 +00:00
080f5b3dee chore(deps): update nextcloud docker tag to v30.0.4
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 7s
ansible-lint / gitleaks (push) Successful in 8s
ansible-lint / Ansible Lint (pull_request) Successful in 1m22s
ansible-lint / Ansible Lint (push) Successful in 1m19s
2024-12-13 04:07:19 +00:00
70b8983cf7 chore(deps): update traefik docker tag to v3.2.2
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 9s
ansible-lint / gitleaks (push) Successful in 39s
ansible-lint / Ansible Lint (pull_request) Successful in 2m6s
ansible-lint / Ansible Lint (push) Successful in 2m1s
2024-12-11 00:07:23 +00:00
2f23d23aeb chore(deps): update docker.io/mongo docker tag to v8.0.4
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 17s
ansible-lint / gitleaks (push) Successful in 10s
ansible-lint / Ansible Lint (pull_request) Successful in 2m40s
ansible-lint / Ansible Lint (push) Successful in 2m33s
2024-12-09 20:17:40 +00:00
a419eee51b chore(deps): update ghcr.io/gramps-project/grampsweb docker tag to v24.12.1
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 7s
ansible-lint / gitleaks (push) Successful in 7s
ansible-lint / Ansible Lint (pull_request) Successful in 1m7s
ansible-lint / Ansible Lint (push) Successful in 1m3s
2024-12-07 16:07:01 +00:00
986a59f629 chore(deps): update nextcloud docker tag to v30.0.3
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 5s
ansible-lint / gitleaks (push) Successful in 7s
ansible-lint / Ansible Lint (pull_request) Successful in 50s
ansible-lint / Ansible Lint (push) Successful in 48s
2024-12-06 04:06:58 +00:00
c2e39b8469 Revert "forgejo: set anything to private, fucking scraper (#253)"
All checks were successful
ansible-lint / Ansible Lint (push) Successful in 1m53s
ansible-lint / gitleaks (push) Successful in 7s
This reverts commit e3f06f23e7.
2024-12-01 16:09:26 +01:00
de268c96d9 authelia: remove registry-ui (#254)
All checks were successful
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Successful in 39s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
Reviewed-on: #254
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-25 20:12:17 +01:00
e3f06f23e7 forgejo: set anything to private, fucking scraper (#253)
All checks were successful
ansible-lint / gitleaks (push) Successful in 7s
ansible-lint / Ansible Lint (push) Successful in 44s
Reviewed-on: #253
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-25 19:59:54 +01:00
e0f8219b8f remove minio (#252)
All checks were successful
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (push) Successful in 42s
Reviewed-on: #252
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-25 18:52:01 +01:00
61fc3f4afb authelia: fix matrix
All checks were successful
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Successful in 41s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-24 21:13:05 +01:00
a4444df568 authelia: enable password reset (#251)
All checks were successful
ansible-lint / gitleaks (push) Successful in 7s
ansible-lint / Ansible Lint (push) Successful in 46s
docker-compose/nextcloud/ldap.sh.j2

Signed-off-by: Michael Grote <michael.grote@posteo.de>

Reviewed-on: #251
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-24 21:08:55 +01:00
79e0382469 navidrome: sort options and add ND_RECENTLYADDEDBYMODTIME
All checks were successful
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (push) Successful in 46s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-24 18:53:26 +01:00
158057bf5b miniflux: rss-filter
All checks were successful
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Successful in 42s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-23 10:11:25 +00:00
b0ac628993 fix "[BUG] Fix config name in warning "Set server_key to an empty string""
All checks were successful
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Successful in 47s
https://github.com/lldap/lldap/issues/1032
2024-11-23 10:01:07 +00:00
831f1256eb chore(deps): update postgres docker tag to v17.2
Some checks failed
ansible-lint / gitleaks (pull_request) Successful in 14s
ansible-lint / Ansible Lint (pull_request) Failing after 11m24s
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (push) Successful in 47s
2024-11-23 04:32:34 +00:00
16065d82d6 chore(deps): update mariadb docker tag to v11.6.2
Some checks failed
ansible-lint / Ansible Lint (push) Waiting to run
ansible-lint / gitleaks (push) Waiting to run
ansible-lint / gitleaks (pull_request) Failing after 12m34s
ansible-lint / Ansible Lint (pull_request) Failing after 12m40s
2024-11-23 04:21:28 +00:00
c2a5841e5c chore(deps): update lldap/lldap docker tag to v0.6.1
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 18m33s
ansible-lint / gitleaks (push) Successful in 17s
ansible-lint / Ansible Lint (pull_request) Successful in 19m24s
ansible-lint / Ansible Lint (push) Successful in 18m33s
2024-11-23 00:22:15 +00:00
1956e70a3a authelia: move outside traefik docker-compose.yml (#246)
All checks were successful
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Successful in 40s
docker-compose/authelia/docker-compose.yml.j2
docker-compose/traefik/docker-compose.yml.j2

Signed-off-by: Michael Grote <michael.grote@posteo.de>

Reviewed-on: #246
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-21 13:00:22 +01:00
ee0e55d828 chore(deps): update traefik docker tag to v3.2.1
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 5s
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (pull_request) Successful in 47s
ansible-lint / Ansible Lint (push) Successful in 44s
2024-11-21 04:07:23 +00:00
1e3d57baf1 chore(deps): update ghcr.io/gramps-project/grampsweb docker tag to v24.11.0
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 11s
ansible-lint / gitleaks (push) Successful in 7s
ansible-lint / Ansible Lint (pull_request) Successful in 1m32s
ansible-lint / Ansible Lint (push) Successful in 1m24s
2024-11-18 08:07:05 +00:00
1c6c59d598 fix role docker_compose: pull and restart (#243)
All checks were successful
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (push) Successful in 51s
This reverts commit c8e7fe9dc6.

Reviewed-on: #243
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-15 21:37:29 +01:00
47b8b13139 act-runner: fix tag
Some checks failed
ansible-lint / gitleaks (push) Successful in 5s
ansible-lint / Ansible Lint (push) Failing after 13m41s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-15 20:22:13 +00:00
874cfbb3cc act-runner: set tag (#242)
All checks were successful
ansible-lint / gitleaks (push) Successful in 6s
ansible-lint / Ansible Lint (push) Successful in 59s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
Reviewed-on: #242
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-15 21:20:47 +01:00
c39d7f9fbc docker networks: standardize names (#241)
Some checks failed
ansible-lint / gitleaks (push) Successful in 15s
ansible-lint / Ansible Lint (push) Has been cancelled
docker-compose/nextcloud/docker-compose.yml.j2
docker-compose/registry/docker-compose.yml.j2
friedhof/lldap/docker-compose.yml.j2

Signed-off-by: Michael Grote <michael.grote@posteo.de>

Reviewed-on: #241
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-15 21:20:10 +01:00
c4ee82d996 chore(deps): update postgres docker tag to v17.1 (#240)
All checks were successful
ansible-lint / gitleaks (push) Successful in 4s
ansible-lint / Ansible Lint (push) Successful in 47s
This PR contains the following updates:

| Package | Update | Change |
|---|---|---|
| postgres | minor | `17.0` -> `17.1` |

---

### Configuration

📅 **Schedule**: Branch creation - At any time (no schedule defined), Automerge - At any time (no schedule defined).

🚦 **Automerge**: Disabled because a matching PR was automerged previously.

♻ **Rebasing**: Whenever PR is behind base branch, or you tick the rebase/retry checkbox.

🔕 **Ignore**: Close this PR and you won't be reminded about this update again.

---

 - [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check this box

---

This PR has been generated by [Renovate Bot](https://github.com/renovatebot/renovate).

Reviewed-on: #240
Co-authored-by: Renovate Bot <renovate@mgrote.net>
Co-committed-by: Renovate Bot <renovate@mgrote.net>
2024-11-15 20:59:14 +01:00
b72ccc4a92 migrate lldap to docker (#238)
All checks were successful
ansible-lint / gitleaks (push) Successful in 4s
ansible-lint / Ansible Lint (push) Successful in 41s
docker-compose/lldap/lldap_config.toml.j2
host_vars/docker10.mgrote.net.yml

Signed-off-by: Michael Grote <michael.grote@posteo.de>

Reviewed-on: #238
Co-authored-by: Michael Grote <michael.grote@posteo.de>
Co-committed-by: Michael Grote <michael.grote@posteo.de>
2024-11-15 20:53:26 +01:00
b3c5a460ba chore(deps): update postgres docker tag to v17.1
All checks were successful
ansible-lint / gitleaks (pull_request) Successful in 5s
ansible-lint / gitleaks (push) Successful in 21s
ansible-lint / Ansible Lint (pull_request) Successful in 4m10s
ansible-lint / Ansible Lint (push) Successful in 4m7s
2024-11-15 00:05:52 +00:00
a3994c941f update lldap to v0.6.0
All checks were successful
ansible-lint / gitleaks (push) Successful in 3s
ansible-lint / Ansible Lint (push) Successful in 36s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-14 13:47:25 +01:00
ad4c287be5 munin: fix munin url
All checks were successful
ansible-lint / gitleaks (push) Successful in 4s
ansible-lint / Ansible Lint (push) Successful in 38s
Signed-off-by: Michael Grote <michael.grote@posteo.de>
2024-11-14 13:27:10 +01:00
42 changed files with 281 additions and 199 deletions

View file

@@ -26,7 +26,7 @@ jobs:
         uses: actions/checkout@v4
       - name: Run Gitleaks
-        uses: docker://zricethezav/gitleaks:v8.21.2
+        uses: docker://zricethezav/gitleaks:v8.22.0
         with:
           args: detect --no-git --verbose --source ${{ github.workspace }}

View file

@@ -3,7 +3,7 @@
 services:
   runner:
     container_name: act-runner
-    image: gitea/act_runner
+    image: gitea/act_runner:0.2.11
     restart: unless-stopped
     pull_policy: missing
     volumes:

View file

@@ -21,10 +21,6 @@ access_control:
       policy: one_factor
       subject:
         - 'group:authelia_wiki'
-    - domain: rui.mgrote.net
-      policy: one_factor
-      subject:
-        - 'group:authelia_registry-ui'
 
 session:
   name: authelia_session
@@ -63,11 +59,11 @@ notifier:
 # https://github.com/lldap/lldap/blob/main/example_configs/authelia_config.yml
 authentication_backend:
   password_reset:
-    disable: true
+    disable: false
   refresh_interval: 1m
   ldap:
     implementation: custom
-    address: ldap://ldap.mgrote.net:3890
+    address: ldap://lldap:3890
     timeout: 5s
     start_tls: false
     base_dn: dc=mgrote,dc=net
@@ -83,4 +79,4 @@ authentication_backend:
     user: uid=authelia_bind_user,ou=people,dc=mgrote,dc=net
     password: '{{ lookup('viczem.keepass.keepass', 'authelia/lldap_authelia_bind_user', 'password') }}'
-# Details/Doku: https://wiki.mgrote.net/pages/_Technik/hardware/rest/fpv/software/rest/ldap/
+# Details/Doku: https://wiki.mgrote.net/pages/_Technik/software/rest/ldap/

View file

@@ -0,0 +1,91 @@
# Details/Doku: https://wiki.mgrote.net/pages/_Technik/software/rest/ldap/?h=ldap
services:
######## authelia ########
authelia:
image: authelia/authelia:4.38.17
container_name: authelia
restart: unless-stopped
pull_policy: missing
environment:
TZ: Europe/Berlin
volumes:
- ./configuration.yml:/config/configuration.yml
- ./users_database.yml:/config/users_database.yml
- authelia_data:/data
labels:
traefik.enable: true
traefik.http.routers.authelia.rule: Host(`auth.mgrote.net`)
traefik.http.services.authelia.loadbalancer.server.port: 9091
traefik.http.routers.authelia.tls: true
traefik.http.routers.authelia.tls.certresolver: resolver_letsencrypt
traefik.http.routers.authelia.entrypoints: entry_https
traefik.http.middlewares.authelia.forwardauth.address: http://authelia:9091/api/verify?rd=https://auth.mgrote.net
traefik.http.middlewares.authelia.forwardauth.trustForwardHeader: true
traefik.http.middlewares.authelia.forwardauth.authResponseHeaders: Remote-User,Remote-Groups,Remote-Name,Remote-Email
depends_on:
- authelia-redis
- authelia-db
networks:
- traefik
- postfix
- authelia
healthcheck:
test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://authelia:9091"]
interval: 30s
timeout: 10s
retries: 3
######## Redis ########
authelia-redis:
image: "redis:7.4.1"
container_name: authelia-redis
restart: unless-stopped
pull_policy: missing
environment:
TZ: Europe/Berlin
networks:
- authelia
healthcheck:
test: ["CMD", "redis-cli", "--no-auth-warning", "ping"]
interval: 5s
timeout: 2s
retries: 3
######## Datenbank ########
authelia-db:
image: "mariadb:11.6.2"
container_name: authelia-db
command: --transaction-isolation=READ-COMMITTED --log-bin=ROW --innodb_read_only_compressed=OFF
restart: unless-stopped
pull_policy: missing
volumes:
- /etc/localtime:/etc/localtime:ro
- /etc/timezone:/etc/timezone:ro
- db:/var/lib/mysql
environment:
MYSQL_ROOT_PASSWORD: "{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_root_password', 'password') }}"
MYSQL_PASSWORD: "{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_password', 'password') }}"
MYSQL_DATABASE: authelia
MYSQL_USER: authelia
MYSQL_INITDB_SKIP_TZINFO: 1
networks:
- authelia
healthcheck:
test: ["CMD", "mariadb-show", "authelia", "-h", "localhost", "-u", "authelia", "-p{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_password', 'password') }}"]
interval: 30s
timeout: 10s
retries: 3
######## Networks ########
networks:
authelia:
traefik:
external: true
postfix:
external: true
######## Volumes ########
volumes:
authelia_data:
db:

View file

@@ -0,0 +1,7 @@
# authelia function matrix
| App | User | Password Reset | Group |
| - | - | - | - |
| ``authelia_*`` | `authelia_bind_user` | yes | `lldap_strict_readonly` + `lldap_password_manager` |
| `forgejo` | `forgejo_bind_user` | no | `lldap_strict_readonly` |
| `nextcloud` | `nextcloud_bind_user` | yes | `lldap_strict_readonly` + `lldap_password_manager` |

View file

@@ -2,7 +2,7 @@
 services:
   grampsweb: &grampsweb
     container_name: grampsweb
-    image: ghcr.io/gramps-project/grampsweb:v24.10.0 # version
+    image: ghcr.io/gramps-project/grampsweb:v24.12.1 # version
     restart: unless-stopped
     pull_policy: missing
     ports:
@@ -36,7 +36,7 @@ services:
   grampsweb_celery:
     <<: *grampsweb # YAML merge key copying the entire grampsweb service config
     ports: []
-    container_name: grampsweb_celery
+    container_name: grampsweb-celery
     depends_on:
       - grampsweb_redis
     command: celery -A gramps_webapi.celery worker --loglevel=INFO
@@ -44,7 +44,7 @@ services:
   grampsweb_redis:
     image: redis:7.4.1-alpine
-    container_name: grampsweb_redis
+    container_name: grampsweb-redis
     restart: unless-stopped
     pull_policy: missing
     healthcheck:

View file

@@ -0,0 +1,52 @@
services:
lldap:
image: lldap/lldap:v0.6.1-debian-rootless
container_name: lldap
restart: unless-stopped
pull_policy: missing
ports:
- "3890:3890"
- "17170:17170" # front-end
volumes:
- "lldap_data:/data"
- "./lldap_config.toml:/data/lldap_config.toml"
environment:
TZ: Europe/Berlin
networks:
- traefik
- postfix
- internal
depends_on:
- lldap-db17
######## Postgres ########
lldap-db17:
container_name: "lldap-db"
image: "postgres:17.2"
restart: unless-stopped
pull_policy: missing
environment:
POSTGRES_USER: lldap
POSTGRES_PASSWORD: "{{ lookup('viczem.keepass.keepass', 'lldap/lldap_db_pass', 'password') }}"
TZ: Europe/Berlin
volumes:
- db17:/var/lib/postgresql/data
networks:
- internal
healthcheck:
test: ["CMD", "pg_isready", "-U", "lldap"]
interval: 10s
start_period: 30s
######## Networks ########
networks:
traefik:
external: true
postfix:
external: true
internal:
######## Volumes ########
volumes:
lldap_data:
db17:

View file

@@ -0,0 +1,29 @@
verbose = false
ldap_host = "0.0.0.0"
ldap_port = 3890
http_host = "0.0.0.0"
http_port = 17170
http_url = "https://ldap.mgrote.net"
jwt_secret = "{{ lookup('viczem.keepass.keepass', 'lldap/lldap_jwt_secret', 'password') }}"
ldap_base_dn = "dc=mgrote,dc=net"
ldap_user_dn = "{{ lookup('viczem.keepass.keepass', 'lldap/lldap_admin_user', 'username') }}"
ldap_user_email = "lldap-admin@mgrote.net"
ldap_user_pass = "{{ lookup('viczem.keepass.keepass', 'lldap/lldap_admin_user', 'password') }}"
database_url = "postgres://lldap:{{ lookup('viczem.keepass.keepass', 'lldap/lldap_db_pass', 'password') }}@lldap-db/lldap"
key_seed = "{{ lookup('viczem.keepass.keepass', 'lldap/lldap_key_seed', 'password') }}"
key_file = ""
force_ldap_user_pass_reset = "always"
[smtp_options]
enable_password_reset = false
server = "postfix"
port = 25
smtp_encryption = "NONE"
reply_to ="Do not reply <info@mgrote.net>"

View file

@@ -2,7 +2,7 @@ services:
   ######## Miniflux ########
   miniflux:
     container_name: "mf-frontend"
-    image: "ghcr.io/miniflux/miniflux:2.2.3"
+    image: "ghcr.io/miniflux/miniflux:2.2.4"
     restart: unless-stopped
     pull_policy: missing
     depends_on:
@@ -19,7 +19,7 @@ services:
       CLEANUP_ARCHIVE_READ_DAYS: 90
       TZ: Europe/Berlin
     networks:
-      - intern
+      - internal
       - traefik
     healthcheck:
       test: ["CMD", "/usr/bin/miniflux", "-healthcheck", "auto"]
@@ -34,7 +34,7 @@ services:
   ######## Postgres ########
   mf-db17:
     container_name: "mf-db"
-    image: "postgres:17.0"
+    image: "postgres:17.2"
     restart: unless-stopped
     pull_policy: missing
     environment:
@@ -45,7 +45,7 @@ services:
     volumes:
       - db17:/var/lib/postgresql/data
     networks:
-      - intern
+      - internal
     healthcheck:
       test: ["CMD", "pg_isready", "-U", "miniflux"]
       interval: 10s
@@ -68,7 +68,7 @@ services:
     volumes:
       - ./filter.txt:/data/filter.txt
     networks:
-      - intern
+      - internal
 ######## Volumes ########
 volumes:
@@ -77,5 +77,5 @@ volumes:
 networks:
   traefik:
     external: true
-  intern:
+  internal:
     driver: bridge

View file

@@ -9,11 +9,15 @@ axios.com::PGA
 axios.com::football
 computerbase.de::Adrenalin 2020 Edition
 computerbase.de::Adrenalin 2021 Edition
+computerbase.de::Adrenalin 2022 Edition
+computerbase.de::Adrenalin 2023 Edition
+computerbase.de::Adrenalin 2024 Edition
+computerbase.de::Adrenalin 2025 Edition
 computerbase.de::CB-Funk
-computerbase.de::CB-Funk-Podcast
-computerbase.de::Community-Umfrage
 computerbase.de::Community-Hilfe
+computerbase.de::Community-Umfrage
 computerbase.de::Community: Fotowettbewerb
+computerbase.de::Computer-Hilfe
 computerbase.de::Elon Musk
 computerbase.de::Fotowettbewerb:
 computerbase.de::Fussball
@@ -51,6 +55,8 @@ hardwareluxx.de::Der Hardwareluxx-Webwatch:
 hardwareluxx.de::Die Artikel unserer Partner
 hardwareluxx.de::KW
 hardwareluxx.de::Shopping Club
+heise.de::Top 10:
+heise.de::Top 5:
 heise.de::"Passwort"
 heise.de::#TGIQF
 heise.de::#heiseshow
@@ -61,6 +67,7 @@ heise.de::Bit-Rauschen, der Prozessor-Podcast
 heise.de::Desinfec
 heise.de::Die Bilder der Woche
 heise.de::Die Highlights bei
+heise.de::Die Produktwerker
 heise.de::Dienstag
 heise.de::Dienstag:
 heise.de::Elon Musk
@@ -68,8 +75,8 @@ heise.de::FIFA
 heise.de::Ferris Talk
 heise.de::Fotografie
 heise.de::Fotografie-Wettbewerb
-heise.de::Fotowettbewerb
 heise.de::Fotonews
+heise.de::Fotowettbewerb
 heise.de::Freitag
 heise.de::Fußball-WM
 heise.de::Heise spielt
@@ -84,7 +91,6 @@ heise.de::Mittwoch
 heise.de::Montag
 heise.de::Podcast
 heise.de::Podcast "Die Hupe"
-heise.de::Die Produktwerker
 heise.de::SoftwareArchitekTOUR
 heise.de::Sonderheft
 heise.de::TGIQF
@@ -95,6 +101,7 @@ heise.de::Twitter
 heise.de::WM 2022
 heise.de::Was war. Was wird.
 heise.de::Zugriff auf alle Inhalte von heise+
+heise.de::bestenlisten
 heise.de::c't <webdev>
 heise.de::ct-Webinar:
 heise.de::die Fotonews der Woche
@@ -286,14 +293,12 @@ tagesschau.de::Werders
 tagesschau.de::Zukunftspodcast
 tagesschau.de::Zweierbob:
 taz.de::America's Cup
-taz.de::Ski
-taz.de::Dart
-taz.de::America's Cup
-taz.de::America's Cup
 taz.de::Bahnrad
+taz.de::Dart
 taz.de::Fussball
 taz.de::Läufer
 taz.de::NBA
+taz.de::Ski
 taz.de::Tischtennis
 taz.de::bundesliga
 taz.de::cricket

View file

@@ -1,9 +0,0 @@
#!/bin/bash
FILTERFILE=./filter.txt
VORHER=$(wc -l < $FILTERFILE)
echo Sortiere und filtere doppelte Zeilen heraus.
sort filter.txt | uniq -u -i | cat > .tmp
NACHHER=$(wc -l < .tmp)
echo Es wurden $((VORHER-NACHHER)) Zeilen entfernt!
mv -f .tmp filter.txt

View file

@@ -1,32 +1,39 @@
 services:
-  ######## navidrome-mg ########
-  navidrome-mg:
-    container_name: "navidrome-mg"
-    image: "deluan/navidrome:0.53.3"
+  ######## navidrome ########
+  navidrome:
+    container_name: "navidrome"
+    image: "deluan/navidrome:0.54.2"
     restart: unless-stopped
     pull_policy: missing
     environment:
-      ND_LOGLEVEL: info
-      ND_SESSIONTIMEOUT: 24h
-      ND_MUSICFOLDER: /music
-      ND_DATAFOLDER: /data
-      ND_SCANSCHEDULE: 0 1 * * *
-      ND_TRANSCODINGCACHESIZE: 500MB
-      ND_IMAGECACHESIZE: 100MB
       ND_AUTOIMPORTPLAYLISTS: false
       ND_BASEURL: /mg
-      ND_ENABLESTARRATING: false
-      ND_ENABLEGRAVATAR: false
-      ND_LASTFM_ENABLED: false
-      ND_ENABLETRANSCODINGCONFIG: true
-      ND_COVERARTPRIORITY: "embedded"
+      ND_COVERARTPRIORITY: "embedded, cover.*, folder.*, front.*, external"
+      ND_DATAFOLDER: /data
       ND_ENABLEARTWORKPRECACHE: true
       ND_ENABLECOVERANIMATION: false
       ND_ENABLEEXTERNALSERVICES: false
+      ND_ENABLEFAVOURITES: true
+      ND_ENABLEGRAVATAR: false
+      ND_ENABLEINSIGHTSCOLLECTOR: false
+      ND_ENABLELOGREDACTING: true
       ND_ENABLEMEDIAFILECOVERART: true
       ND_ENABLEREPLAYGAIN: true
      ND_ENABLESHARING: false
+      ND_ENABLESTARRATING: false
+      ND_ENABLETRANSCODINGCONFIG: true
+      ND_IMAGECACHESIZE: 100MB
       ND_JUKEBOX_ENABLED: false
+      ND_LASTFM_ENABLED: false
+      ND_LISTENBRAINZ_ENABLED: false
+      ND_LOGLEVEL: info
+      ND_MUSICFOLDER: /music
+      ND_PLAYLISTSPATH: "_playlists"
+      ND_PROMETHEUS_ENABLED: false
+      ND_RECENTLYADDEDBYMODTIME: true
+      ND_SCANSCHEDULE: 0 1 * * *
+      ND_SESSIONTIMEOUT: 24h
+      ND_TRANSCODINGCACHESIZE: 500MB
       PUID: 1000
       PGID: 1000
       TZ: Europe/Berlin

View file

@@ -1,7 +1,7 @@
 services:
   ######## Datenbank ########
   nextcloud-db:
-    image: "mariadb:11.5.2"
+    image: "mariadb:11.6.2"
     container_name: nextcloud-db
     command: --transaction-isolation=READ-COMMITTED --log-bin=ROW --innodb_read_only_compressed=OFF
     restart: unless-stopped
@@ -17,7 +17,7 @@ services:
       MYSQL_USER: nextcloud
       MYSQL_INITDB_SKIP_TZINFO: 1
     networks:
-      - intern
+      - internal
     healthcheck:
       test: ["CMD", "mariadb-show", "nextcloud", "-h", "localhost", "-u", "nextcloud", "-p{{ lookup('viczem.keepass.keepass', 'nextcloud/nextcloud_mysql_password', 'password') }}"]
       interval: 30s
@@ -36,7 +36,7 @@ services:
     container_name: nextcloud-redis
     hostname: nextcloud-redis
     networks:
-      - intern
+      - internal
     restart: unless-stopped
     pull_policy: missing
     command: "redis-server --requirepass {{ lookup('viczem.keepass.keepass', 'nextcloud/nextcloud_redis_host_password', 'password') }}"
@@ -62,7 +62,7 @@ services:
   ######## Nextcloud ########
   nextcloud-app:
-    image: "nextcloud:30.0.2"
+    image: "nextcloud:30.0.4"
     container_name: nextcloud-app
     restart: unless-stopped
     pull_policy: missing
@@ -101,7 +101,7 @@ services:
       - ./misc.sh:/docker-entrypoint-hooks.d/post-installation/misc.sh
       - ./misc.sh:/docker-entrypoint-hooks.d/before-starting/misc.sh
     networks:
-      - intern
+      - internal
       - traefik
       - postfix
     healthcheck:
@@ -129,7 +129,7 @@ services:
 ######## Networks ########
 networks:
-  intern:
+  internal:
     driver: bridge
   traefik:
     external: true

View file

@@ -10,7 +10,7 @@ php occ app:enable user_ldap
 #php occ ldap:create-empty-config # wird nur bei komplett neuer nextcloud benötigt, legt sonst bei jedem durchlauf weitere ldap-configs an
 # EDIT: domain
-php occ ldap:set-config s01 ldapHost "ldap://ldap.mgrote.net."
+php occ ldap:set-config s01 ldapHost "ldap://lldap."
 php occ ldap:set-config s01 ldapPort 3890
 # EDIT: admin user
 php occ ldap:set-config s01 ldapAgentName "uid=nextcloud_bind_user,ou=people,dc=mgrote,dc=net"
@@ -45,5 +45,6 @@ php occ ldap:set-config s01 ldapUuidGroupAttribute auto
 php occ ldap:set-config s01 ldapUuidUserAttribute auto
 php occ ldap:set-config s01 ldapExpertUsernameAttr user_id
 php occ ldap:set-config s01 ldap_mark_remnants_as_disabled 1
+php occ ldap:set-config s01 ldap_turn_on_pwd_change 1
 # damit der Login über LDAP geht muss das Attribute "DisplayName" gesetzt sein!

View file

@@ -8,7 +8,7 @@ services:
       - oci:/var/lib/registry
     networks:
       - traefik
-      - intern
+      - internal
     depends_on:
       - oci-registry-redis
     healthcheck:
@@ -51,7 +51,7 @@ services:
     image: "redis:7.4.1"
     container_name: oci-registry-redis
     networks:
-      - intern
+      - internal
     restart: unless-stopped
     pull_policy: missing
     environment:
@@ -66,9 +66,10 @@ services:
   oci-registry-ui:
     restart: unless-stopped
     pull_policy: missing
-    # url: registry.mgrote.net/ui/index.html
     image: "joxit/docker-registry-ui:2.5.7"
     container_name: oci-registry-ui
+    ports:
+      - 5511:80
     environment:
       DELETE_IMAGES: true
       SINGLE_REGISTRY: true
@@ -80,26 +81,18 @@ services:
       - oci-registry
     networks:
       - traefik
-      - intern
+      - internal
     healthcheck:
       test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://127.0.0.1"]
       interval: 30s
       timeout: 10s
       retries: 3
-    labels:
-      traefik.http.routers.registry-ui.rule: Host(`rui.mgrote.net`)
-      traefik.http.routers.registry-ui.middlewares: allowlist_localnet@file,ratelimit40@file,authelia@docker
-      traefik.enable: true
-      traefik.http.routers.registry-ui.tls: true
-      traefik.http.routers.registry-ui.tls.certresolver: resolver_letsencrypt
-      traefik.http.routers.registry-ui.entrypoints: entry_https
-      traefik.http.services.registry-ui.loadbalancer.server.port: 80
 ######## Networks ########
 networks:
   traefik:
     external: true
-  intern:
+  internal:
 ######## Volumes ########
 volumes:

View file

@@ -1,10 +1,10 @@
-# Details/Doku: https://wiki.mgrote.net/pages/_Technik/hardware/rest/fpv/software/rest/ldap/
+# Details/Doku: https://wiki.mgrote.net/pages/_Technik/software/rest/ldap/?h=ldap
 services:
   ######## traefik ########
   traefik:
     container_name: traefik
-    image: "traefik:v3.2.0"
+    image: "traefik:v3.2.3"
     restart: unless-stopped
     pull_policy: missing
     volumes:
@@ -26,84 +26,6 @@ services:
       interval: 30s
       timeout: 10s
       retries: 3
-    depends_on:
-      - authelia
-  ######## authelia ########
-  authelia:
-    image: authelia/authelia:4.38.17
-    container_name: authelia
-    restart: unless-stopped
-    pull_policy: missing
-    environment:
-      TZ: Europe/Berlin
-    volumes:
-      - ./configuration.yml:/config/configuration.yml
-      - ./users_database.yml:/config/users_database.yml
-      - authelia_data:/data
-    labels:
-      traefik.enable: true
-      traefik.http.routers.authelia.rule: Host(`auth.mgrote.net`)
-      traefik.http.services.authelia.loadbalancer.server.port: 9091
-      traefik.http.routers.authelia.tls: true
-      traefik.http.routers.authelia.tls.certresolver: resolver_letsencrypt
-      traefik.http.routers.authelia.entrypoints: entry_https
-      traefik.http.middlewares.authelia.forwardauth.address: http://authelia:9091/api/verify?rd=https://auth.mgrote.net
-      traefik.http.middlewares.authelia.forwardauth.trustForwardHeader: true
-      traefik.http.middlewares.authelia.forwardauth.authResponseHeaders: Remote-User,Remote-Groups,Remote-Name,Remote-Email
-    depends_on:
-      - authelia-redis
-      - authelia-db
-    networks:
-      - traefik
-      - postfix
-      - authelia
-    healthcheck:
-      test: ["CMD", "wget", "--quiet", "--tries=1", "--spider", "http://authelia:9091"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
-  authelia-redis:
-    image: "redis:7.4.1"
-    container_name: authelia-redis
-    restart: unless-stopped
-    pull_policy: missing
-    environment:
-      TZ: Europe/Berlin
-    networks:
-      - authelia
-    healthcheck:
-      test: ["CMD", "redis-cli", "--no-auth-warning", "ping"]
-      interval: 5s
-      timeout: 2s
-      retries: 3
-  ######## Datenbank ########
-  authelia-db:
-    image: "mariadb:11.5.2"
-    container_name: authelia-db
-    command: --transaction-isolation=READ-COMMITTED --log-bin=ROW --innodb_read_only_compressed=OFF
-    restart: unless-stopped
-    pull_policy: missing
-    volumes:
-      - /etc/localtime:/etc/localtime:ro
-      - /etc/timezone:/etc/timezone:ro
-      - db:/var/lib/mysql
-    environment:
-      MYSQL_ROOT_PASSWORD: "{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_root_password', 'password') }}"
-      MYSQL_PASSWORD: "{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_password', 'password') }}"
-      MYSQL_DATABASE: authelia
-      MYSQL_USER: authelia
-      MYSQL_INITDB_SKIP_TZINFO: 1
-    networks:
-      - authelia
-    healthcheck:
-      test: ["CMD", "mariadb-show", "authelia", "-h", "localhost", "-u", "authelia", "-p{{ lookup('viczem.keepass.keepass', 'authelia/authelia_mysql_password', 'password') }}"]
-      interval: 30s
-      timeout: 10s
-      retries: 3
 ######## Networks ########
 networks:
@@ -115,5 +37,3 @@ networks:
 ######## Volumes ########
 volumes:
   acme_data:
-  authelia_data:
-  db:

View file

@@ -45,7 +45,7 @@ services:
     # 1. mongosh
     # 2. db.getSiblingDB("unifidb").createUser({user: "unifiuser", pwd: "GEHEIM", roles: [{role: "dbOwner", db: "unifidb"}, {role: "dbOwner", db: "unifidb_stat"}]});
     # https://discourse.linuxserver.io/t/cant-connect-to-mongodb-for-unifi-network-application/8166
-    image: "docker.io/mongo:8.0.3"
+    image: "docker.io/mongo:8.0.4"
     container_name: unifi-db
     volumes:
       - db-data:/data/db

View file

@@ -35,7 +35,7 @@ ufw_rules:
     from_ip: 192.168.2.0/24
 ### mgrote_lldap
-lldap_package_url: "https://download.opensuse.org/repositories/home:/Masgalor:/LLDAP/xUbuntu_22.04/amd64/lldap_0.5.0-1+4.1_amd64.deb"
+lldap_package_url: "https://download.opensuse.org/repositories/home:/Masgalor:/LLDAP/xUbuntu_22.04/amd64/lldap_0.6.0-1+1.1_amd64.deb"
 lldap_logging_verbose: "true" # must be a string not a boolean
 lldap_http_port: 17170
 lldap_http_host: "0.0.0.0"

View file

@@ -11,7 +11,7 @@ services:
       # For the web front-end
       - "17170:17170"
     networks:
-      - intern
+      - internal
       - traefik
       - postfix
     volumes:
@@ -56,7 +56,7 @@ services:
       - MYSQL_USER=lldap-db-user
       - MYSQL_INITDB_SKIP_TZINFO=1
     networks:
-      - intern
+      - internal
     labels:
       - com.centurylinklabs.watchtower.enable=true
@@ -66,7 +66,7 @@ volumes:
   db:
 ######## Networks ########
 networks:
-  intern:
+  internal:
   traefik:
     external: true
   postfix:

View file

@@ -8,9 +8,11 @@
     url: "{{ minio_url }}"
     state: "{{ item.state | default('present') }}"
   loop: "{{ minio_buckets }}"
+  delegate_to: localhost
 - name: Deletion Info
   ansible.builtin.debug:
     msg: "Deletion only possible with empty bucket ({{ item.name }})"
   when: '"absent" in item.state'
   loop: "{{ minio_buckets }}"
+  delegate_to: localhost

View file

@@ -12,6 +12,7 @@
     access_key: "{{ minio_root_access_key }}"
     secret_key: "{{ minio_root_secret_key }}"
     state: present
+  delegate_to: localhost
 - name: include policy tasks
   ansible.builtin.include_tasks: policy.yml
@@ -31,6 +32,7 @@
     state: "{{ item.state | default('present') }}"
   loop: "{{ minio_users }}"
   no_log: "{{ no_debug | default('true') }}"
+  delegate_to: localhost
 # Bug: delegate_to: localhost
 # in ansible-devspace wird das Python Paket "minio" nicht gefunden

View file

@@ -7,6 +7,7 @@
     owner: root
     group: root
     mode: '0644'
+  delegate_to: localhost
 - name: "prep: template policy files (ro)"
   ansible.builtin.template:
@@ -16,6 +17,7 @@
     group: root
     mode: '0644'
   loop: "{{ minio_buckets }}"
+  delegate_to: localhost
 - name: "prep: template policy files (rw)"
   ansible.builtin.template:
@@ -25,16 +27,19 @@
     group: root
     mode: '0644'
   loop: "{{ minio_buckets }}"
+  delegate_to: localhost
 - name: "setup policies (ro)"
   ansible.builtin.command: "{{ minio_client_bin }} --disable-pager admin policy create {{ minio_root_alias }} {{ item.name }}_ro {{ minio_config_dir }}/{{ item.name }}_ro"
   loop: "{{ minio_buckets }}"
   changed_when: false # Befehl gibt immer "Created policy `testbucket3_ro` successfully." aus, unabhängig ob sie schon existiert oder nicht.
+  delegate_to: localhost
 - name: "setup policies (rw)"
   ansible.builtin.command: "{{ minio_client_bin }} --disable-pager admin policy create {{ minio_root_alias }} {{ item.name }}_rw {{ minio_config_dir }}/{{ item.name }}_rw"
   loop: "{{ minio_buckets }}"
   changed_when: false # Befehl gibt immer "Created policy `testbucket3_ro` successfully." aus, unabhängig ob sie schon existiert oder nicht.
+  delegate_to: localhost
 - name: "remove old policy files"
   ansible.builtin.file:
@@ -42,3 +47,4 @@
     state: absent
   loop: "{{ minio_buckets }}"
   when: '"absent" in item.state'
+  delegate_to: localhost

View file

@@ -85,13 +85,9 @@ blocky_custom_lookups: # optional
   - name: fritz.box
     ip: 192.168.5.1
   - name: ldap.mgrote.net
-    ip: 192.168.2.47
+    ip: 192.168.2.43
   - name: munin.mgrote.net
     ip: 192.168.2.40
-  - name: s3.mgrote.net
-    ip: 192.168.2.43
-  - name: rui.mgrote.net
-    ip: 192.168.2.43
 ### mgrote_munin_node
 # kann git.mgrote.net nicht auflösen, deshalb hiermit IP

View file

@@ -1,17 +1,4 @@
 ---
-### mgrote_minio_configure
-minio_url: https://s3.mgrote.net
-minio_root_access_key: "{{ lookup('viczem.keepass.keepass', 'minio/minio_root_access_key', 'password') }}"
-minio_root_secret_key: "{{ lookup('viczem.keepass.keepass', 'minio/minio_root_secret_key', 'password') }}"
-minio_users:
-  - name: testuser
-    secret: "{{ lookup('viczem.keepass.keepass', 'minio/minio_testuser_secret_key', 'password') }}"
-    state: present
-    policy: testbucket_rw
-minio_buckets:
-  - name: testbucket
-    state: present
 ### mrlesmithjr.manage_lvm
 lvm_groups:
   - vgname: vg_docker
@@ -32,7 +19,6 @@ pvresize_to_max: true
 pip_package: python3-pip
 pip_install_packages:
   - name: docker # für munin-plugin docker_
-  - name: minio # für ansible-minio_configure-Rolle
 ### mgrote.apt_manage_packages
 apt_packages_extra:

View file

@@ -148,7 +148,7 @@ gitea_fail2ban_jail_bantime: "600"
 gitea_fail2ban_jail_action: "iptables-allports"
 ### mgrote_gitea_setup
-gitea_ldap_host: "ldap.mgrote.net"
+gitea_ldap_host: "docker10.mgrote.net"
 gitea_ldap_base_path: "dc=mgrote,dc=net"
 gitea_ldap_bind_user: "forgejo_bind_user"
 gitea_ldap_bind_pass: "{{ lookup('viczem.keepass.keepass', 'forgejo/lldap_forgejo_bind_user', 'password') }}"

View file

@@ -67,9 +67,6 @@ munin_hosts:
   - name: blocky.mgrote.net
     address: blocky.mgrote.net
     extra: ["use_node_name yes"]
-  - name: ldap.mgrote.net
-    address: ldap.mgrote.net
-    extra: ["use_node_name yes"]
 ### mgrote_munin_node
 munin_node_bind_host: "127.0.0.1"
@@ -104,7 +101,7 @@ munin_node_plugins:
     src: https://git.mgrote.net/mirrors/munin-contrib/raw/branch/master/plugins/http/http_response
     config: |
       [http_response]
-      env.sites https://git.mgrote.net http://ldap.mgrote.net:17170 https://docker10.mgrote.net:8443 https://registry.mgrote.net/ui/ http://munin.mgrote.net http://192.168.5.1 http://192.168.3.1 http://192.168.3.108:8080 http://192.168.3.204 http://docker10.mgrote.net:6483 https://miniflux.mgrote.net/ https://nextcloud.mgrote.net https://audio.mgrote.net/mg http://wiki.mgrote.net https://s3.mgrote.net https://auth.mgrote.net
+      env.sites https://git.mgrote.net https://docker10.mgrote.net:8443 http://munin.mgrote.net http://192.168.5.1 http://192.168.3.1 http://192.168.3.108:8080 http://192.168.3.204 http://docker10.mgrote.net:6483 https://miniflux.mgrote.net/ https://nextcloud.mgrote.net https://audio.mgrote.net/mg http://wiki.mgrote.net https://auth.mgrote.net http://docker10.mgrote.net:17170
       env.max_time 20
       env.short_label true
       env.follow_redirect true

View file

@@ -40,7 +40,7 @@ cv4pve_api_user: root@pam!cv4pve-autosnap
 cv4pve_api_token: "{{ lookup('viczem.keepass.keepass', 'cv4pve_api_token', 'password') }}"
 cv4pve_vmid: all,-107
 cv4pve_keep_snapshots: 5
-cv4pve_version: v1.14.11
+cv4pve_version: v1.15.0
 ### mgrote_apt_manage_packages
 apt_packages_extra:

View file

@@ -50,8 +50,9 @@ compose_files:
     state: present
   - name: act-runner
     state: present
-  - name: minio
+  - name: lldap
     state: present
+    network: traefik
 ### oefenweb.ufw
 ufw_rules:

View file

@@ -230,8 +230,6 @@ pve_pbs_datastore:
 ### mgrote_sync
 rsync_host_role: source
 rsync_mirror_dirs: # bei Src+Dest KEIN "/" am Ende
-  - src: /hdd_data/tmp
-    dest: "{{ rsync_mirror_user }}@pbs.mgrote.net:/backup/pve5/tmp"
   - src: /hdd_data/archiv
     dest: "{{ rsync_mirror_user }}@pbs.mgrote.net:/backup/pve5/archiv"
   - src: /hdd_data/backup

View file

@@ -6,9 +6,6 @@ all:
     blocky:
       hosts:
        blocky.mgrote.net:
-    ldap:
-      hosts:
-        ldap.mgrote.net:
     lxc:
       hosts:
         fileserver3.mgrote.net:
@@ -47,7 +44,6 @@ all:
         docker10.mgrote.net:
         pbs.mgrote.net:
         blocky.mgrote.net:
-        ldap.mgrote.net:
         munin.mgrote.net:
     test:
       hosts:

Binary file not shown.

View file

@@ -25,6 +25,3 @@
     - role: mgrote_docker_housekeeping
       tags: "housekeeping"
       become: true
-    - role: mgrote_minio_configure
-      tags: "minio"
-      become: true

View file

@@ -51,13 +51,22 @@
   when:
     - item.network is defined
 
-- name: restart changed container
+- name: Pull images
   become: true
-  community.docker.docker_compose_v2:
-    project_src: "{{ compose_dest_basedir }}/{{ item.name }}"
-    state: restarted
-    dependencies: true
-    pull: missing
+  ansible.builtin.command: docker-compose pull
+  args:
+    chdir: "{{ compose_dest_basedir }}/{{ item.name }}"
+  register: pull_result # speichere ergebnis in var
+  changed_when: "pull_result.rc == 0" # markiere tasks als changed when exit-code == 0
+  loop: "{{ compose_files }}"
+
+- name: (re)start container
+  become: true
+  ansible.builtin.command: docker-compose up -d
+  args:
+    chdir: "{{ compose_dest_basedir }}/{{ item.name }}"
+  register: start_result # speichere ergebnis in var
+  changed_when: "start_result.rc == 0" # markiere tasks als changed when exit-code == 0
   loop: "{{ compose_files }}"
   when:
     - item.state == "present"