# personal_infra/ansible/services/ntfy-emergency-app/deploy_ntfy_emergency_app_playbook.yml
# Last modified: 2025-12-07 19:02:50 +01:00
---
# Deploys the ntfy-emergency-app container (Docker Compose) on host "vipy",
# exposes it through a Caddy reverse proxy, and registers an HTTP monitor for
# it in Uptime Kuma via a helper script executed on the control node.
- name: Deploy ntfy-emergency-app with Docker Compose and configure Caddy reverse proxy
  hosts: vipy
  become: true
  vars_files:
    - ../../infra_vars.yml
    - ../../infra_secrets.yml
    - ../../services_config.yml
    - ./ntfy_emergency_app_vars.yml
  vars:
    # caddy_sites_dir, ntfy_emergency_app_dir, ntfy_emergency_app_port,
    # ntfy_emergency_app_topic and ntfy_emergency_app_ui_message are expected
    # from the vars_files above — TODO confirm. NOTE(review): the original play
    # also set `caddy_sites_dir: "{{ caddy_sites_dir }}"` here; that
    # self-referential definition adds nothing and can trip Ansible's
    # recursive-loop detection, so it has been removed.
    ntfy_emergency_app_subdomain: "{{ subdomains.ntfy_emergency_app }}"
    ntfy_emergency_app_domain: "{{ ntfy_emergency_app_subdomain }}.{{ root_domain }}"
    ntfy_service_domain: "{{ subdomains.ntfy }}.{{ root_domain }}"
    ntfy_emergency_app_ntfy_url: "https://{{ ntfy_service_domain }}"
    # ntfy credentials are optional; default to empty strings so templating
    # never fails when they are absent from the secrets file.
    ntfy_emergency_app_ntfy_user: "{{ ntfy_username | default('') }}"
    ntfy_emergency_app_ntfy_password: "{{ ntfy_password | default('') }}"
    uptime_kuma_api_url: "https://{{ subdomains.uptime_kuma }}.{{ root_domain }}"

  tasks:
    - name: Create ntfy-emergency-app directory
      ansible.builtin.file:
        path: "{{ ntfy_emergency_app_dir }}"
        state: directory
        owner: "{{ ansible_user }}"
        group: "{{ ansible_user }}"
        mode: "0755"

    - name: Create docker-compose.yml for ntfy-emergency-app
      ansible.builtin.copy:
        dest: "{{ ntfy_emergency_app_dir }}/docker-compose.yml"
        content: |
          version: "3"
          services:
            ntfy-emergency-app:
              image: ghcr.io/pmartincalvo/ntfy-emergency-app:latest
              container_name: ntfy-emergency-app
              restart: unless-stopped
              ports:
                - "{{ ntfy_emergency_app_port }}:3000"
              environment:
                NTFY_TOPIC: "{{ ntfy_emergency_app_topic }}"
                NTFY_URL: "{{ ntfy_emergency_app_ntfy_url }}"
                NTFY_USER: "{{ ntfy_emergency_app_ntfy_user }}"
                NTFY_PASSWORD: "{{ ntfy_emergency_app_ntfy_password }}"
                UI_MESSAGE: "{{ ntfy_emergency_app_ui_message }}"

    - name: Deploy ntfy-emergency-app container with docker compose
      # NOTE(review): `docker compose up -d` converges on its own, but the
      # command module cannot detect that, so this task always reports changed.
      ansible.builtin.command: docker compose up -d
      args:
        chdir: "{{ ntfy_emergency_app_dir }}"

    - name: Ensure Caddy sites-enabled directory exists
      ansible.builtin.file:
        path: "{{ caddy_sites_dir }}"
        state: directory
        owner: root
        group: root
        mode: "0755"

    - name: Ensure Caddyfile includes import directive for sites-enabled
      ansible.builtin.lineinfile:
        path: /etc/caddy/Caddyfile
        line: 'import sites-enabled/*'
        insertafter: EOF
        state: present
        backup: true

    - name: Create Caddy reverse proxy configuration for ntfy-emergency-app
      ansible.builtin.copy:
        dest: "{{ caddy_sites_dir }}/ntfy-emergency-app.conf"
        content: |
          {{ ntfy_emergency_app_domain }} {
              reverse_proxy localhost:{{ ntfy_emergency_app_port }}
          }
        owner: root
        group: root
        mode: "0644"

    - name: Reload Caddy to apply new config
      # systemd module instead of a raw `systemctl` command: proper failure
      # reporting and no shell round-trip.
      ansible.builtin.systemd:
        name: caddy
        state: reloaded

    - name: Create Uptime Kuma monitor setup script for ntfy-emergency-app
      delegate_to: localhost
      become: false
      ansible.builtin.copy:
        dest: /tmp/setup_ntfy_emergency_app_monitor.py
        mode: "0755"
        content: |
          #!/usr/bin/env python3
          """Create (if missing) an Uptime Kuma HTTP monitor for ntfy-emergency-app."""
          import sys
          import traceback

          import yaml
          from uptime_kuma_api import UptimeKumaApi, MonitorType

          try:
              # Connection and monitor parameters are passed in via a temp file
              # written by the playbook.
              with open('/tmp/ansible_config.yml', 'r') as f:
                  config = yaml.safe_load(f)
              url = config['uptime_kuma_url']
              username = config['username']
              password = config['password']
              monitor_url = config['monitor_url']
              monitor_name = config['monitor_name']

              api = UptimeKumaApi(url, timeout=30)
              api.login(username, password)
              monitors = api.get_monitors()

              def find_group(items):
                  # MonitorType is a str-based enum, so this also matches plain 'group'.
                  return next(
                      (m for m in items
                       if m.get('name') == 'services' and m.get('type') == MonitorType.GROUP),
                      None,
                  )

              # Find or create the "services" group monitor.
              group = find_group(monitors)
              if not group:
                  api.add_monitor(type=MonitorType.GROUP, name='services')
                  monitors = api.get_monitors()  # refresh to pick up the new group id
                  group = find_group(monitors)
              if not group:
                  raise RuntimeError("could not find or create the 'services' monitor group")

              existing_monitor = next(
                  (m for m in monitors if m.get('name') == monitor_name), None
              )

              # Attach the first configured ntfy notification channel, if any.
              ntfy_notification_id = next(
                  (n.get('id') for n in api.get_notifications() if n.get('type') == 'ntfy'),
                  None,
              )

              if existing_monitor:
                  print(f"Monitor '{monitor_name}' already exists (ID: {existing_monitor['id']})")
                  print("Skipping - monitor already configured")
              else:
                  print(f"Creating monitor '{monitor_name}'...")
                  api.add_monitor(
                      type=MonitorType.HTTP,
                      name=monitor_name,
                      url=monitor_url,
                      parent=group['id'],
                      interval=60,
                      maxretries=3,
                      retryInterval=60,
                      notificationIDList={ntfy_notification_id: True} if ntfy_notification_id else {}
                  )
              api.disconnect()
              print("SUCCESS")
          except Exception as e:
              error_msg = str(e) if str(e) else repr(e)
              print(f"ERROR: {error_msg}", file=sys.stderr)
              traceback.print_exc(file=sys.stderr)
              sys.exit(1)

    - name: Create temporary config for monitor setup
      delegate_to: localhost
      become: false
      ansible.builtin.copy:
        dest: /tmp/ansible_config.yml
        content: |
          uptime_kuma_url: "{{ uptime_kuma_api_url }}"
          username: "{{ uptime_kuma_username }}"
          password: "{{ uptime_kuma_password }}"
          monitor_url: "https://{{ ntfy_emergency_app_domain }}"
          monitor_name: "ntfy-emergency-app"
        # Contains the Uptime Kuma password — keep it owner-readable only.
        mode: "0600"

    - name: Run Uptime Kuma monitor setup
      ansible.builtin.command: python3 /tmp/setup_ntfy_emergency_app_monitor.py
      delegate_to: localhost
      become: false
      register: monitor_setup
      # The script prints SUCCESS even when it skips an existing monitor, so
      # key "changed" on the creation message instead.
      changed_when: "'Creating monitor' in monitor_setup.stdout"
      ignore_errors: true

    - name: Clean up temporary files
      delegate_to: localhost
      become: false
      ansible.builtin.file:
        path: "{{ item }}"
        state: absent
      loop:
        - /tmp/setup_ntfy_emergency_app_monitor.py
        - /tmp/ansible_config.yml