From 6a43132bc8d318070e1f592a8d99542f5e2cbbc9 Mon Sep 17 00:00:00 2001
From: counterweight
Date: Mon, 1 Dec 2025 11:16:47 +0100
Subject: [PATCH] Restructure inventory into role groups, resolve remote hosts
 via hostvars with fallbacks, and verify backups after the first run

---
 ansible/example.inventory.ini                 | 18 +++----
 .../01_user_and_access_setup_playbook.yml     |  2 +-
 .../02_firewall_and_fail2ban_playbook.yml     |  2 +-
 ansible/infra/430_cpu_temp_alerts.yml         |  2 +-
 ansible/infra/920_join_headscale_mesh.yml     | 12 ++---
 .../nodito/30_proxmox_bootstrap_playbook.yml  |  2 +-
 .../31_proxmox_community_repos_playbook.yml   |  2 +-
 .../nodito/32_zfs_pool_setup_playbook.yml     |  2 +-
 .../33_proxmox_debian_cloud_template.yml      |  2 +-
 ansible/services/caddy_playbook.yml           |  2 +-
 ansible/services/forgejo/forgejo_vars.yml     |  8 ++--
 .../forgejo/setup_backup_forgejo_to_lapy.yml  |  4 +-
 ansible/services/headscale/headscale_vars.yml |  8 ++--
 .../setup_backup_headscale_to_lapy.yml        |  4 +-
 ansible/services/lnbits/lnbits_vars.yml       |  8 ++--
 .../lnbits/setup_backup_lnbits_to_lapy.yml    |  4 +-
 ansible/services/memos/memos_vars.yml         |  8 ++--
 .../ntfy_emergency_app_vars.yml               |  8 ++--
 .../services/ntfy/deploy_ntfy_playbook.yml    |  3 +-
 .../setup_backup_uptime_kuma_to_lapy.yml      | 48 +++++++++++++++++--
 .../services/uptime_kuma/uptime_kuma_vars.yml | 10 ++--
 .../deploy_vaultwarden_playbook.yml           |  5 +-
 .../setup_backup_vaultwarden_to_lapy.yml      | 48 +++++++++++++++++--
 .../services/vaultwarden/vaultwarden_vars.yml |  8 ++--
 ansible/services_config.yml                   | 16 +++---
 25 files changed, 167 insertions(+), 69 deletions(-)

diff --git a/ansible/example.inventory.ini b/ansible/example.inventory.ini
index 63e73c4..bde96dd 100644
--- a/ansible/example.inventory.ini
+++ b/ansible/example.inventory.ini
@@ -1,14 +1,14 @@
-[vipy]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[vps]
+vipy ansible_host=your.services.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+watchtower ansible_host=your.monitoring.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+spacey ansible_host=your.headscale.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
 
-[watchtower]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[nodito_host]
+nodito ansible_host=your.proxmox.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key ansible_ssh_pass=your_root_password
 
-[nodito]
-your.proxmox.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key ansible_ssh_pass=your_root_password
-
-[spacey]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[nodito_vms]
+# Example node, replace with your VM names and addresses
+# memos_box ansible_host=192.168.1.150 ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
 
 # Local connection to laptop: this assumes you're running ansible commands from your personal laptop
 # Make sure to adjust the username
diff --git a/ansible/infra/01_user_and_access_setup_playbook.yml b/ansible/infra/01_user_and_access_setup_playbook.yml
index 7654622..a86772c 100644
--- a/ansible/infra/01_user_and_access_setup_playbook.yml
+++ b/ansible/infra/01_user_and_access_setup_playbook.yml
@@ -1,5 +1,5 @@
 - name: Secure Debian VPS
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   vars_files:
     - ../infra_vars.yml
   become: true
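Note: the inventory now keys each machine by a stable alias and carries the
real address in ansible_host, so plays like the one above target a role group
instead of a comma-separated host list. A minimal sketch (hypothetical
playbook, not part of this patch) of how the new vps group is addressed:

    # ping_vps.yml -- hypothetical smoke test, assuming the inventory above.
    # "hosts: vps" expands to the vipy, watchtower, and spacey aliases, and
    # Ansible connects to each alias's ansible_host address.
    - name: Verify connectivity to all VPS hosts
      hosts: vps
      gather_facts: false
      tasks:
        - name: Ping over SSH
          ansible.builtin.ping:
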
diff --git a/ansible/infra/02_firewall_and_fail2ban_playbook.yml b/ansible/infra/02_firewall_and_fail2ban_playbook.yml
index ef07476..b50a0e3 100644
--- a/ansible/infra/02_firewall_and_fail2ban_playbook.yml
+++ b/ansible/infra/02_firewall_and_fail2ban_playbook.yml
@@ -1,5 +1,5 @@
 - name: Secure Debian VPS
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   vars_files:
     - ../infra_vars.yml
   become: true
diff --git a/ansible/infra/430_cpu_temp_alerts.yml b/ansible/infra/430_cpu_temp_alerts.yml
index d3c00be..3b87102 100644
--- a/ansible/infra/430_cpu_temp_alerts.yml
+++ b/ansible/infra/430_cpu_temp_alerts.yml
@@ -1,5 +1,5 @@
 - name: Deploy CPU Temperature Monitoring
-  hosts: nodito
+  hosts: nodito_host
   become: yes
   vars_files:
     - ../infra_vars.yml
diff --git a/ansible/infra/920_join_headscale_mesh.yml b/ansible/infra/920_join_headscale_mesh.yml
index 10675ae..a0c3b5a 100644
--- a/ansible/infra/920_join_headscale_mesh.yml
+++ b/ansible/infra/920_join_headscale_mesh.yml
@@ -5,20 +5,18 @@
     - ../infra_vars.yml
     - ../services_config.yml
   vars:
+    headscale_host_name: "spacey"
     headscale_subdomain: "{{ subdomains.headscale }}"
     headscale_domain: "https://{{ headscale_subdomain }}.{{ root_domain }}"
     headscale_namespace: "{{ service_settings.headscale.namespace }}"
 
   tasks:
-    - name: Set headscale host
-      set_fact:
-        headscale_host: "{{ groups['spacey'][0] }}"
-
     - name: Set facts for headscale server connection
       set_fact:
-        headscale_user: "{{ hostvars[headscale_host]['ansible_user'] }}"
-        headscale_key: "{{ hostvars[headscale_host]['ansible_ssh_private_key_file'] | default('') }}"
-        headscale_port: "{{ hostvars[headscale_host]['ansible_port'] | default(22) }}"
+        headscale_host: "{{ hostvars.get(headscale_host_name, {}).get('ansible_host', headscale_host_name) }}"
+        headscale_user: "{{ hostvars.get(headscale_host_name, {}).get('ansible_user', 'counterweight') }}"
+        headscale_key: "{{ hostvars.get(headscale_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+        headscale_port: "{{ hostvars.get(headscale_host_name, {}).get('ansible_port', 22) }}"
 
     - name: Get user ID for namespace from headscale server via lapy
       delegate_to: "{{ groups['lapy'][0] }}"
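Note: the set_fact above now resolves every connection detail through
hostvars with an explicit fallback, so the play degrades gracefully instead
of failing with an undefined-variable error when the alias is absent from the
inventory. A minimal sketch (hypothetical playbook) of the lookup chain in
isolation, assuming hostvars behaves as a mapping, which is how the patch
itself uses it:

    # hostvars_fallback_demo.yml -- hypothetical. If "spacey" is missing from
    # the inventory, the outer .get() yields {} and the inner .get() falls
    # back to the alias itself, so the debug task prints "spacey".
    - name: Show the hostvars fallback chain
      hosts: localhost
      gather_facts: false
      vars:
        headscale_host_name: "spacey"
      tasks:
        - name: Resolve the address or fall back to the alias
          ansible.builtin.debug:
            msg: "{{ hostvars.get(headscale_host_name, {}).get('ansible_host', headscale_host_name) }}"
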
diff --git a/ansible/infra/nodito/30_proxmox_bootstrap_playbook.yml b/ansible/infra/nodito/30_proxmox_bootstrap_playbook.yml
index 842d2c0..02c6679 100644
--- a/ansible/infra/nodito/30_proxmox_bootstrap_playbook.yml
+++ b/ansible/infra/nodito/30_proxmox_bootstrap_playbook.yml
@@ -1,5 +1,5 @@
 - name: Bootstrap Nodito SSH Key Access
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
diff --git a/ansible/infra/nodito/31_proxmox_community_repos_playbook.yml b/ansible/infra/nodito/31_proxmox_community_repos_playbook.yml
index 64d81c2..b0be2ef 100644
--- a/ansible/infra/nodito/31_proxmox_community_repos_playbook.yml
+++ b/ansible/infra/nodito/31_proxmox_community_repos_playbook.yml
@@ -1,5 +1,5 @@
 - name: Switch Proxmox VE from Enterprise to Community Repositories
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
diff --git a/ansible/infra/nodito/32_zfs_pool_setup_playbook.yml b/ansible/infra/nodito/32_zfs_pool_setup_playbook.yml
index 192cd00..4ff0ed4 100644
--- a/ansible/infra/nodito/32_zfs_pool_setup_playbook.yml
+++ b/ansible/infra/nodito/32_zfs_pool_setup_playbook.yml
@@ -1,5 +1,5 @@
 - name: Setup ZFS RAID 1 Pool for Proxmox Storage
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
diff --git a/ansible/infra/nodito/33_proxmox_debian_cloud_template.yml b/ansible/infra/nodito/33_proxmox_debian_cloud_template.yml
index 40cf26e..e8f8332 100644
--- a/ansible/infra/nodito/33_proxmox_debian_cloud_template.yml
+++ b/ansible/infra/nodito/33_proxmox_debian_cloud_template.yml
@@ -1,5 +1,5 @@
 - name: Create Proxmox template from Debian cloud image (no VM clone)
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../../infra_vars.yml
diff --git a/ansible/services/caddy_playbook.yml b/ansible/services/caddy_playbook.yml
index f0985f5..4935424 100644
--- a/ansible/services/caddy_playbook.yml
+++ b/ansible/services/caddy_playbook.yml
@@ -1,5 +1,5 @@
 - name: Install and configure Caddy on Debian 12
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   become: yes
 
   tasks:
diff --git a/ansible/services/forgejo/forgejo_vars.yml b/ansible/services/forgejo/forgejo_vars.yml
index 6277f9a..0bbb5a5 100644
--- a/ansible/services/forgejo/forgejo_vars.yml
+++ b/ansible/services/forgejo/forgejo_vars.yml
@@ -12,9 +12,11 @@ forgejo_user: "git"
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/forgejo-backups"
diff --git a/ansible/services/forgejo/setup_backup_forgejo_to_lapy.yml b/ansible/services/forgejo/setup_backup_forgejo_to_lapy.yml
index 7e27ba6..05a6aed 100644
--- a/ansible/services/forgejo/setup_backup_forgejo_to_lapy.yml
+++ b/ansible/services/forgejo/setup_backup_forgejo_to_lapy.yml
@@ -53,9 +53,9 @@
       ENCRYPTED_BACKUP="{{ local_backup_dir }}/forgejo-backup-$TIMESTAMP.tar.gz.gpg"
 
       {% if remote_key_file %}
-      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
       {% else %}
-      SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -p {{ remote_port }}"
      {% endif %}
 
       echo "Stopping Forgejo service..."
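Note: remote_port is now resolved once in the vars file, so the backup script
template interpolates {{ remote_port }} instead of re-deriving the port from
hostvars at every call site. A minimal sketch (hypothetical playbook, path
assumed) that previews the SSH command the template renders:

    # preview_ssh_cmd.yml -- hypothetical debugging aid; assumes
    # forgejo_vars.yml loads standalone and the inventory above is in use
    # (without it, the hostvars fallbacks simply yield the defaults).
    - name: Preview the backup SSH command
      hosts: localhost
      gather_facts: false
      vars_files:
        - services/forgejo/forgejo_vars.yml
      tasks:
        - name: Show the rendered SSH_CMD
          ansible.builtin.debug:
            msg: "ssh {% if remote_key_file %}-i {{ remote_key_file }} {% endif %}-p {{ remote_port }} {{ remote_user }}@{{ remote_host }}"
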
diff --git a/ansible/services/headscale/headscale_vars.yml b/ansible/services/headscale/headscale_vars.yml
index 39b70b6..653c175 100644
--- a/ansible/services/headscale/headscale_vars.yml
+++ b/ansible/services/headscale/headscale_vars.yml
@@ -13,9 +13,11 @@ headscale_data_dir: /var/lib/headscale
 # Namespace now configured in services_config.yml under service_settings.headscale.namespace
 
 # Remote access
-remote_host: "{{ groups['spacey'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "spacey"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/headscale-backups"
diff --git a/ansible/services/headscale/setup_backup_headscale_to_lapy.yml b/ansible/services/headscale/setup_backup_headscale_to_lapy.yml
index 6a9136a..5f9a764 100644
--- a/ansible/services/headscale/setup_backup_headscale_to_lapy.yml
+++ b/ansible/services/headscale/setup_backup_headscale_to_lapy.yml
@@ -43,9 +43,9 @@
       mkdir -p "$BACKUP_DIR"
 
       {% if remote_key_file %}
-      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
       {% else %}
-      SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -p {{ remote_port }}"
       {% endif %}
 
       # Stop headscale service for consistent backup
diff --git a/ansible/services/lnbits/lnbits_vars.yml b/ansible/services/lnbits/lnbits_vars.yml
index 672f300..bdb97df 100644
--- a/ansible/services/lnbits/lnbits_vars.yml
+++ b/ansible/services/lnbits/lnbits_vars.yml
@@ -6,9 +6,11 @@ lnbits_port: 8765
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/lnbits-backups"
diff --git a/ansible/services/lnbits/setup_backup_lnbits_to_lapy.yml b/ansible/services/lnbits/setup_backup_lnbits_to_lapy.yml
index 2666f69..012c78a 100644
--- a/ansible/services/lnbits/setup_backup_lnbits_to_lapy.yml
+++ b/ansible/services/lnbits/setup_backup_lnbits_to_lapy.yml
@@ -45,9 +45,9 @@
       ENCRYPTED_BACKUP="{{ local_backup_dir }}/lnbits-backup-$TIMESTAMP.tar.gz.gpg"
 
       {% if remote_key_file %}
-      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
       {% else %}
-      SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -p {{ remote_port }}"
       {% endif %}
 
       # Stop LNBits service before backup
diff --git a/ansible/services/memos/memos_vars.yml b/ansible/services/memos/memos_vars.yml
index f6c6e57..d027842 100644
--- a/ansible/services/memos/memos_vars.yml
+++ b/ansible/services/memos/memos_vars.yml
@@ -5,9 +5,11 @@ memos_port: 5230
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['memos_box'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "memos-box"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/memos-backups"
diff --git a/ansible/services/ntfy-emergency-app/ntfy_emergency_app_vars.yml b/ansible/services/ntfy-emergency-app/ntfy_emergency_app_vars.yml
index d551c4c..415bc4d 100644
--- a/ansible/services/ntfy-emergency-app/ntfy_emergency_app_vars.yml
+++ b/ansible/services/ntfy-emergency-app/ntfy_emergency_app_vars.yml
@@ -9,6 +9,8 @@ ntfy_emergency_app_topic: "emergencia"
 ntfy_emergency_app_ui_message: "Leave Pablo a message, he will respond as soon as possible"
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
diff --git a/ansible/services/ntfy/deploy_ntfy_playbook.yml b/ansible/services/ntfy/deploy_ntfy_playbook.yml
index 0c2268d..0729baa 100644
--- a/ansible/services/ntfy/deploy_ntfy_playbook.yml
+++ b/ansible/services/ntfy/deploy_ntfy_playbook.yml
@@ -3,6 +3,7 @@
   become: yes
   vars_files:
     - ../../infra_vars.yml
+    - ../../infra_secrets.yml
     - ../../services_config.yml
     - ./ntfy_vars.yml
   vars:
@@ -73,7 +74,7 @@
 
   - name: Create ntfy admin user
     shell: |
-      (echo "{{ lookup('env', 'NTFY_PASSWORD') }}"; echo "{{ lookup('env', 'NTFY_PASSWORD') }}") | ntfy user add --role=admin "{{ lookup('env', 'NTFY_USER') }}"
+      (echo "{{ ntfy_password }}"; echo "{{ ntfy_password }}") | ntfy user add --role=admin "{{ ntfy_username }}"
 
   - name: Ensure Caddy sites-enabled directory exists
     file:
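Note: the admin-user task above now reads ntfy_username and ntfy_password
from infra_secrets.yml instead of shell environment lookups, which makes the
run reproducible regardless of the invoking shell. A minimal sketch of the
entries that file needs (hypothetical values; the variable names are the ones
the task references):

    # ansible/infra_secrets.yml -- keep this file out of version control,
    # or encrypt it in place:  ansible-vault encrypt ansible/infra_secrets.yml
    ntfy_username: "admin"
    ntfy_password: "use-a-long-random-string"
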
diff --git a/ansible/services/uptime_kuma/setup_backup_uptime_kuma_to_lapy.yml b/ansible/services/uptime_kuma/setup_backup_uptime_kuma_to_lapy.yml
index 5aa5beb..9ae9713 100644
--- a/ansible/services/uptime_kuma/setup_backup_uptime_kuma_to_lapy.yml
+++ b/ansible/services/uptime_kuma/setup_backup_uptime_kuma_to_lapy.yml
@@ -43,15 +43,24 @@
       mkdir -p "$BACKUP_DIR"
 
       {% if remote_key_file %}
-      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
       {% else %}
-      SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -p {{ remote_port }}"
       {% endif %}
 
       rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_data_path }}/ "$BACKUP_DIR/"
 
       # Rotate old backups (keep 14 days)
-      find "{{ local_backup_dir }}" -maxdepth 1 -type d -name '20*' -mtime +13 -exec rm -rf {} \;
+      # Calculate cutoff date (14 days ago) and delete backups older than that
+      CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d')
+      for dir in "{{ local_backup_dir }}"/20*; do
+        if [ -d "$dir" ]; then
+          dir_date=$(basename "$dir")
+          if [ "$dir_date" != "$TIMESTAMP" ] && [ "$dir_date" \< "$CUTOFF_DATE" ]; then
+            rm -rf "$dir"
+          fi
+        fi
+      done
 
   - name: Ensure cronjob for backup exists
     cron:
@@ -63,3 +72,36 @@
 
   - name: Run the backup script to make the first backup
     command: "{{ backup_script_path }}"
+
+  - name: Verify backup was created
+    block:
+      - name: Get today's date
+        command: date +'%Y-%m-%d'
+        register: today_date
+        changed_when: false
+
+      - name: Check backup directory exists and contains files
+        stat:
+          path: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+        register: backup_dir_stat
+
+      - name: Verify backup directory exists
+        assert:
+          that:
+            - backup_dir_stat.stat.exists
+            - backup_dir_stat.stat.isdir
+          fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} was not created"
+          success_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists"
+
+      - name: Check if backup directory contains files
+        find:
+          paths: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+          recurse: yes
+        register: backup_files
+
+      - name: Verify backup directory is not empty
+        assert:
+          that:
+            - backup_files.files | length > 0
+          fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists but is empty"
+          success_msg: "Backup directory contains {{ backup_files.files | length }} file(s)"
diff --git a/ansible/services/uptime_kuma/uptime_kuma_vars.yml b/ansible/services/uptime_kuma/uptime_kuma_vars.yml
index 0f885c3..3263f49 100644
--- a/ansible/services/uptime_kuma/uptime_kuma_vars.yml
+++ b/ansible/services/uptime_kuma/uptime_kuma_vars.yml
@@ -3,12 +3,12 @@
 uptime_kuma_dir: /opt/uptime-kuma
 uptime_kuma_data_dir: "{{ uptime_kuma_dir }}/data"
 uptime_kuma_port: 3001
-# (caddy_sites_dir and subdomain now in services_config.yml)
-
 # Remote access
-remote_host: "{{ groups['watchtower'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "watchtower"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/uptime-kuma-backups"
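Note: the rotation logic in the backup script above deletes by directory name
rather than find -mtime, so a re-synced directory's modification time can no
longer postpone its own cleanup. Because the names are ISO dates
(YYYY-MM-DD), lexicographic order is chronological order, which is what makes
the plain string comparison valid. A minimal sketch (hypothetical standalone
task, example path) of the same logic, assuming GNU date as the script above
does:

    # rotate_demo.yml -- hypothetical; mirrors the cutoff logic introduced
    # above against an example directory tree.
    - name: Demonstrate date-based backup rotation
      hosts: localhost
      gather_facts: false
      tasks:
        - name: Delete date-named directories older than 14 days
          ansible.builtin.shell: |
            CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d')
            for dir in "$HOME/example-backups"/20*; do
              [ -d "$dir" ] || continue
              if [ "$(basename "$dir")" \< "$CUTOFF_DATE" ]; then
                rm -rf "$dir"
              fi
            done
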
diff --git a/ansible/services/vaultwarden/deploy_vaultwarden_playbook.yml b/ansible/services/vaultwarden/deploy_vaultwarden_playbook.yml
index 85badb0..0340538 100644
--- a/ansible/services/vaultwarden/deploy_vaultwarden_playbook.yml
+++ b/ansible/services/vaultwarden/deploy_vaultwarden_playbook.yml
@@ -119,6 +119,7 @@
       content: |
         #!/usr/bin/env python3
         import sys
+        import traceback
         import yaml
         from uptime_kuma_api import UptimeKumaApi, MonitorType
 
@@ -183,7 +184,9 @@
             print("SUCCESS")
 
         except Exception as e:
-            print(f"ERROR: {str(e)}", file=sys.stderr)
+            error_msg = str(e) if str(e) else repr(e)
+            print(f"ERROR: {error_msg}", file=sys.stderr)
+            traceback.print_exc(file=sys.stderr)
             sys.exit(1)
 
       mode: '0755'
diff --git a/ansible/services/vaultwarden/setup_backup_vaultwarden_to_lapy.yml b/ansible/services/vaultwarden/setup_backup_vaultwarden_to_lapy.yml
index 68cd588..064d633 100644
--- a/ansible/services/vaultwarden/setup_backup_vaultwarden_to_lapy.yml
+++ b/ansible/services/vaultwarden/setup_backup_vaultwarden_to_lapy.yml
@@ -41,15 +41,24 @@
       mkdir -p "$BACKUP_DIR"
 
       {% if remote_key_file %}
-      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
       {% else %}
-      SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+      SSH_CMD="ssh -p {{ remote_port }}"
       {% endif %}
 
       rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_data_path }}/ "$BACKUP_DIR/"
 
       # Rotate old backups (keep 14 days)
-      find "{{ local_backup_dir }}" -maxdepth 1 -type d -name '20*' -mtime +13 -exec rm -rf {} \;
+      # Calculate cutoff date (14 days ago) and delete backups older than that
+      CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d')
+      for dir in "{{ local_backup_dir }}"/20*; do
+        if [ -d "$dir" ]; then
+          dir_date=$(basename "$dir")
+          if [ "$dir_date" != "$TIMESTAMP" ] && [ "$dir_date" \< "$CUTOFF_DATE" ]; then
+            rm -rf "$dir"
+          fi
+        fi
+      done
 
   - name: Ensure cronjob for backup exists
     cron:
@@ -61,3 +70,36 @@
 
   - name: Run the backup script to make the first backup
     command: "{{ backup_script_path }}"
+
+  - name: Verify backup was created
+    block:
+      - name: Get today's date
+        command: date +'%Y-%m-%d'
+        register: today_date
+        changed_when: false
+
+      - name: Check backup directory exists and contains files
+        stat:
+          path: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+        register: backup_dir_stat
+
+      - name: Verify backup directory exists
+        assert:
+          that:
+            - backup_dir_stat.stat.exists
+            - backup_dir_stat.stat.isdir
+          fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} was not created"
+          success_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists"
+
+      - name: Check if backup directory contains files
+        find:
+          paths: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+          recurse: yes
+        register: backup_files
+
+      - name: Verify backup directory is not empty
+        assert:
+          that:
+            - backup_files.files | length > 0
+          fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists but is empty"
+          success_msg: "Backup directory contains {{ backup_files.files | length }} file(s)"
'HOME') }}/vaultwarden-backups" diff --git a/ansible/services_config.yml b/ansible/services_config.yml index c61a6f5..94f9faf 100644 --- a/ansible/services_config.yml +++ b/ansible/services_config.yml @@ -4,22 +4,22 @@ # Edit these subdomains to match your preferences subdomains: # Monitoring Services (on watchtower) - ntfy: test-ntfy - uptime_kuma: test-uptime + ntfy: ntfy + uptime_kuma: uptime # VPN Infrastructure (on spacey) - headscale: test-headscale + headscale: headscale # Core Services (on vipy) - vaultwarden: test-vault - forgejo: test-git - lnbits: test-lnbits + vaultwarden: vault + forgejo: git + lnbits: lnbits # Secondary Services (on vipy) - ntfy_emergency_app: test-emergency + ntfy_emergency_app: emergency # Memos (on memos-box) - memos: test-memos + memos: memos # Caddy configuration caddy_sites_dir: /etc/caddy/sites-enabled