too much stuff
commit 6a43132bc8 (parent fbbeb59c0e)
25 changed files with 167 additions and 69 deletions
@@ -1,14 +1,14 @@
-[vipy]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[vps]
+vipy ansible_host=your.services.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+watchtower ansible_host=your.monitoring.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+spacey ansible_host=your.headscale.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
 
-[watchtower]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[nodito_host]
+nodito ansible_host=your.proxmox.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key ansible_ssh_pass=your_root_password
 
-[nodito]
-your.proxmox.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key ansible_ssh_pass=your_root_password
-[spacey]
-your.vps.ip.here ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
+[nodito_vms]
+# Example node, replace with your VM names and addresses
+# memos_box ansible_host=192.168.1.150 ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
 
 # Local connection to laptop: this assumes you're running ansible commands from your personal laptop
 # Make sure to adjust the username
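The inventory now collects the three public servers under a single [vps] group and gives each an alias with ansible_host, so one play can target all of them. A minimal connectivity-check sketch against the new layout (hypothetical helper playbook, not part of this commit; only the group and alias names from the inventory above are assumed):

    # ping_vps.yml (hypothetical)
    - name: Check connectivity to the new vps group
      hosts: vps                    # expands to vipy, watchtower and spacey
      gather_facts: false
      tasks:
        - name: Ping each host over SSH
          ansible.builtin.ping:

Run it with something like: ansible-playbook -i <your inventory file> ping_vps.yml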
@@ -1,5 +1,5 @@
 - name: Secure Debian VPS
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   vars_files:
     - ../infra_vars.yml
   become: true
@@ -1,5 +1,5 @@
 - name: Secure Debian VPS
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   vars_files:
     - ../infra_vars.yml
   become: true
@@ -1,5 +1,5 @@
 - name: Deploy CPU Temperature Monitoring
-  hosts: nodito
+  hosts: nodito_host
   become: yes
   vars_files:
     - ../infra_vars.yml
@@ -5,20 +5,18 @@
     - ../infra_vars.yml
     - ../services_config.yml
   vars:
+    headscale_host_name: "spacey"
     headscale_subdomain: "{{ subdomains.headscale }}"
     headscale_domain: "https://{{ headscale_subdomain }}.{{ root_domain }}"
     headscale_namespace: "{{ service_settings.headscale.namespace }}"
 
   tasks:
-    - name: Set headscale host
-      set_fact:
-        headscale_host: "{{ groups['spacey'][0] }}"
-
     - name: Set facts for headscale server connection
       set_fact:
-        headscale_user: "{{ hostvars[headscale_host]['ansible_user'] }}"
-        headscale_key: "{{ hostvars[headscale_host]['ansible_ssh_private_key_file'] | default('') }}"
-        headscale_port: "{{ hostvars[headscale_host]['ansible_port'] | default(22) }}"
+        headscale_host: "{{ hostvars.get(headscale_host_name, {}).get('ansible_host', headscale_host_name) }}"
+        headscale_user: "{{ hostvars.get(headscale_host_name, {}).get('ansible_user', 'counterweight') }}"
+        headscale_key: "{{ hostvars.get(headscale_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+        headscale_port: "{{ hostvars.get(headscale_host_name, {}).get('ansible_port', 22) }}"
 
     - name: Get user ID for namespace from headscale server via lapy
       delegate_to: "{{ groups['lapy'][0] }}"
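The play now derives the headscale connection facts straight from the inventory entry named by headscale_host_name, with fallbacks when that host is missing from hostvars. A worked trace using the example inventory values above (placeholders, not real addresses):

    # spacey ansible_host=your.headscale.vps.ip ansible_user=counterweight ansible_port=22 ansible_ssh_private_key_file=~/.ssh/your-key
    #   headscale_host -> your.headscale.vps.ip
    #   headscale_user -> counterweight
    #   headscale_key  -> ~/.ssh/your-key
    #   headscale_port -> 22
    # If "spacey" were absent from the inventory, the .get() fallbacks yield the literal
    # name "spacey", user "counterweight", an empty key path, and port 22.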
@@ -1,5 +1,5 @@
 - name: Bootstrap Nodito SSH Key Access
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
@@ -1,5 +1,5 @@
 - name: Switch Proxmox VE from Enterprise to Community Repositories
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
@@ -1,5 +1,5 @@
 - name: Setup ZFS RAID 1 Pool for Proxmox Storage
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../infra_vars.yml
@@ -1,5 +1,5 @@
 - name: Create Proxmox template from Debian cloud image (no VM clone)
-  hosts: nodito
+  hosts: nodito_host
   become: true
   vars_files:
     - ../../infra_vars.yml
@@ -1,5 +1,5 @@
 - name: Install and configure Caddy on Debian 12
-  hosts: vipy,watchtower,spacey
+  hosts: vps
   become: yes
 
   tasks:
@@ -12,9 +12,11 @@ forgejo_user: "git"
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/forgejo-backups"
@@ -53,9 +53,9 @@
 ENCRYPTED_BACKUP="{{ local_backup_dir }}/forgejo-backup-$TIMESTAMP.tar.gz.gpg"
 
 {% if remote_key_file %}
-SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
 {% else %}
-SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -p {{ remote_port }}"
 {% endif %}
 
 echo "Stopping Forgejo service..."
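With the new remote_* vars above, the rendered SSH command in the backup script no longer reaches into hostvars at template time. Using the example inventory key and default port (placeholder values), the two branches render roughly as:

    # remote_key_file set:   SSH_CMD="ssh -i ~/.ssh/your-key -p 22"
    # remote_key_file empty: SSH_CMD="ssh -p 22"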
@@ -13,9 +13,11 @@ headscale_data_dir: /var/lib/headscale
 # Namespace now configured in services_config.yml under service_settings.headscale.namespace
 
 # Remote access
-remote_host: "{{ groups['spacey'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "spacey"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/headscale-backups"
@@ -43,9 +43,9 @@
 mkdir -p "$BACKUP_DIR"
 
 {% if remote_key_file %}
-SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
 {% else %}
-SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -p {{ remote_port }}"
 {% endif %}
 
 # Stop headscale service for consistent backup
@@ -6,9 +6,11 @@ lnbits_port: 8765
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/lnbits-backups"
@@ -45,9 +45,9 @@
 ENCRYPTED_BACKUP="{{ local_backup_dir }}/lnbits-backup-$TIMESTAMP.tar.gz.gpg"
 
 {% if remote_key_file %}
-SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
 {% else %}
-SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -p {{ remote_port }}"
 {% endif %}
 
 # Stop LNBits service before backup
@@ -5,9 +5,11 @@ memos_port: 5230
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['memos_box'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "memos-box"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/memos-backups"
@@ -9,6 +9,8 @@ ntfy_emergency_app_topic: "emergencia"
 ntfy_emergency_app_ui_message: "Leave Pablo a message, he will respond as soon as possible"
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
@@ -3,6 +3,7 @@
   become: yes
   vars_files:
     - ../../infra_vars.yml
+    - ../../infra_secrets.yml
     - ../../services_config.yml
     - ./ntfy_vars.yml
   vars:
@@ -73,7 +74,7 @@
 
     - name: Create ntfy admin user
       shell: |
-        (echo "{{ lookup('env', 'NTFY_PASSWORD') }}"; echo "{{ lookup('env', 'NTFY_PASSWORD') }}") | ntfy user add --role=admin "{{ lookup('env', 'NTFY_USER') }}"
+        (echo "{{ ntfy_password }}"; echo "{{ ntfy_password }}") | ntfy user add --role=admin "{{ ntfy_username }}"
 
     - name: Ensure Caddy sites-enabled directory exists
       file:
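The admin credentials now come from the Ansible variables ntfy_username and ntfy_password instead of NTFY_USER / NTFY_PASSWORD environment lookups; presumably they are defined in the infra_secrets.yml added to vars_files above. A sketch with placeholder values (the real file is not part of this diff):

    # infra_secrets.yml (hypothetical contents)
    ntfy_username: admin
    ntfy_password: "change-me"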
@@ -43,15 +43,24 @@
 mkdir -p "$BACKUP_DIR"
 
 {% if remote_key_file %}
-SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
 {% else %}
-SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -p {{ remote_port }}"
 {% endif %}
 
 rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_data_path }}/ "$BACKUP_DIR/"
 
 # Rotate old backups (keep 14 days)
-find "{{ local_backup_dir }}" -maxdepth 1 -type d -name '20*' -mtime +13 -exec rm -rf {} \;
+# Calculate cutoff date (14 days ago) and delete backups older than that
+CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d')
+for dir in "{{ local_backup_dir }}"/20*; do
+  if [ -d "$dir" ]; then
+    dir_date=$(basename "$dir")
+    if [ "$dir_date" != "$TIMESTAMP" ] && [ "$dir_date" \< "$CUTOFF_DATE" ]; then
+      rm -rf "$dir"
+    fi
+  fi
+done
 
 - name: Ensure cronjob for backup exists
   cron:
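The rotation now compares the date encoded in each backup directory's name against a cutoff instead of relying on find -mtime, so directory modification times no longer matter. A worked trace with hypothetical dates:

    # TIMESTAMP=2025-03-20  ->  CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d') = 2025-03-06
    # 2025-03-01  -> "2025-03-01" \< "2025-03-06" is true   -> removed
    # 2025-03-10  -> not older than the cutoff              -> kept
    # 2025-03-20  -> equals $TIMESTAMP (the current run)    -> kept
    # The string comparison works because YYYY-MM-DD names sort lexicographically in date order.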
@@ -63,3 +72,36 @@
 
 - name: Run the backup script to make the first backup
   command: "{{ backup_script_path }}"
+
+- name: Verify backup was created
+  block:
+    - name: Get today's date
+      command: date +'%Y-%m-%d'
+      register: today_date
+      changed_when: false
+
+    - name: Check backup directory exists and contains files
+      stat:
+        path: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+      register: backup_dir_stat
+
+    - name: Verify backup directory exists
+      assert:
+        that:
+          - backup_dir_stat.stat.exists
+          - backup_dir_stat.stat.isdir
+        fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} was not created"
+        success_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists"
+
+    - name: Check if backup directory contains files
+      find:
+        paths: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+        recurse: yes
+      register: backup_files
+
+    - name: Verify backup directory is not empty
+      assert:
+        that:
+          - backup_files.files | length > 0
+        fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists but is empty"
+        success_msg: "Backup directory contains {{ backup_files.files | length }} file(s)"
@@ -3,12 +3,12 @@ uptime_kuma_dir: /opt/uptime-kuma
 uptime_kuma_data_dir: "{{ uptime_kuma_dir }}/data"
 uptime_kuma_port: 3001
 
-# (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['watchtower'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "watchtower"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/uptime-kuma-backups"
@@ -119,6 +119,7 @@
 content: |
   #!/usr/bin/env python3
   import sys
+  import traceback
   import yaml
   from uptime_kuma_api import UptimeKumaApi, MonitorType
|
||||||
print("SUCCESS")
|
print("SUCCESS")
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(f"ERROR: {str(e)}", file=sys.stderr)
|
error_msg = str(e) if str(e) else repr(e)
|
||||||
|
print(f"ERROR: {error_msg}", file=sys.stderr)
|
||||||
|
traceback.print_exc(file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
mode: '0755'
|
mode: '0755'
|
||||||
|
|
||||||
|
|
|
||||||
|
|
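The monitor-registration script now reports something useful even when an exception carries an empty message. An illustrative case (not from this diff):

    # raise ValueError()  ->  str(e) == ""  but  repr(e) == "ValueError()"
    # so the script prints "ERROR: ValueError()" instead of a bare "ERROR: ",
    # and traceback.print_exc() writes the full stack trace to stderr.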
@@ -41,15 +41,24 @@
 mkdir -p "$BACKUP_DIR"
 
 {% if remote_key_file %}
-SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -i {{ remote_key_file }} -p {{ remote_port }}"
 {% else %}
-SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}"
+SSH_CMD="ssh -p {{ remote_port }}"
 {% endif %}
 
 rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_data_path }}/ "$BACKUP_DIR/"
 
 # Rotate old backups (keep 14 days)
-find "{{ local_backup_dir }}" -maxdepth 1 -type d -name '20*' -mtime +13 -exec rm -rf {} \;
+# Calculate cutoff date (14 days ago) and delete backups older than that
+CUTOFF_DATE=$(date -d '14 days ago' +'%Y-%m-%d')
+for dir in "{{ local_backup_dir }}"/20*; do
+  if [ -d "$dir" ]; then
+    dir_date=$(basename "$dir")
+    if [ "$dir_date" != "$TIMESTAMP" ] && [ "$dir_date" \< "$CUTOFF_DATE" ]; then
+      rm -rf "$dir"
+    fi
+  fi
+done
 
 - name: Ensure cronjob for backup exists
   cron:
@@ -61,3 +70,36 @@
 
 - name: Run the backup script to make the first backup
   command: "{{ backup_script_path }}"
+
+- name: Verify backup was created
+  block:
+    - name: Get today's date
+      command: date +'%Y-%m-%d'
+      register: today_date
+      changed_when: false
+
+    - name: Check backup directory exists and contains files
+      stat:
+        path: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+      register: backup_dir_stat
+
+    - name: Verify backup directory exists
+      assert:
+        that:
+          - backup_dir_stat.stat.exists
+          - backup_dir_stat.stat.isdir
+        fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} was not created"
+        success_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists"
+
+    - name: Check if backup directory contains files
+      find:
+        paths: "{{ local_backup_dir }}/{{ today_date.stdout }}"
+        recurse: yes
+      register: backup_files
+
+    - name: Verify backup directory is not empty
+      assert:
+        that:
+          - backup_files.files | length > 0
+        fail_msg: "Backup directory {{ local_backup_dir }}/{{ today_date.stdout }} exists but is empty"
+        success_msg: "Backup directory contains {{ backup_files.files | length }} file(s)"
@@ -6,9 +6,11 @@ vaultwarden_port: 8222
 # (caddy_sites_dir and subdomain now in services_config.yml)
 
 # Remote access
-remote_host: "{{ groups['vipy'][0] }}"
-remote_user: "{{ hostvars[remote_host]['ansible_user'] }}"
-remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}"
+remote_host_name: "vipy"
+remote_host: "{{ hostvars.get(remote_host_name, {}).get('ansible_host', remote_host_name) }}"
+remote_user: "{{ hostvars.get(remote_host_name, {}).get('ansible_user', 'counterweight') }}"
+remote_key_file: "{{ hostvars.get(remote_host_name, {}).get('ansible_ssh_private_key_file', '') }}"
+remote_port: "{{ hostvars.get(remote_host_name, {}).get('ansible_port', 22) }}"
 
 # Local backup
 local_backup_dir: "{{ lookup('env', 'HOME') }}/vaultwarden-backups"
@@ -4,22 +4,22 @@
 # Edit these subdomains to match your preferences
 subdomains:
   # Monitoring Services (on watchtower)
-  ntfy: test-ntfy
-  uptime_kuma: test-uptime
+  ntfy: ntfy
+  uptime_kuma: uptime
 
   # VPN Infrastructure (on spacey)
-  headscale: test-headscale
+  headscale: headscale
 
   # Core Services (on vipy)
-  vaultwarden: test-vault
-  forgejo: test-git
-  lnbits: test-lnbits
+  vaultwarden: vault
+  forgejo: git
+  lnbits: lnbits
 
   # Secondary Services (on vipy)
-  ntfy_emergency_app: test-emergency
+  ntfy_emergency_app: emergency
 
   # Memos (on memos-box)
-  memos: test-memos
+  memos: memos
 
 # Caddy configuration
 caddy_sites_dir: /etc/caddy/sites-enabled
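Dropping the test- prefixes moves every service onto its production subdomain. If root_domain were example.com (placeholder), the published hostnames would be:

    # vault.example.com, git.example.com, lnbits.example.com, emergency.example.com,
    # ntfy.example.com, uptime.example.com, headscale.example.com, memos.example.com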