stuff

parent 83fa331ae4
commit c14d61d090

21 changed files with 265 additions and 4880 deletions
@@ -1,4 +0,0 @@
-new_user: counterweight
-ssh_port: 22
-allow_ssh_from: "any"
-root_domain: contrapeso.xyz
@@ -99,6 +99,7 @@
        --login-server {{ headscale_domain }}
        --authkey {{ auth_key }}
        --accept-dns=true
        --advertise-tags "tag:{{ inventory_hostname }}"
      register: tailscale_up_result
      changed_when: "'already authenticated' not in tailscale_up_result.stdout"
      failed_when: tailscale_up_result.rc != 0 and 'already authenticated' not in tailscale_up_result.stdout
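A note on the `changed_when`/`failed_when` pair above: rerunning `tailscale up` on a node that is already joined exits non-zero and prints "already authenticated", and the two expressions reclassify that outcome as "ok, unchanged". A minimal Python sketch of the rule they encode (the rc/stdout sample values are assumptions for illustration, not captured output):

```python
# How the changed_when/failed_when expressions classify a `tailscale up` run.
def classify(rc: int, stdout: str) -> tuple[bool, bool]:
    """Return (changed, failed) the way the two task expressions would."""
    already = "already authenticated" in stdout
    changed = not already              # changed_when
    failed = rc != 0 and not already   # failed_when
    return changed, failed

assert classify(0, "Success.") == (True, False)                # first join
assert classify(1, "already authenticated") == (False, False)  # idempotent rerun
assert classify(1, "backend error") == (True, True)            # genuine failure
```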
@@ -9,3 +9,13 @@ uptime_kuma_password: "your_password_here"
 
 ntfy_username: "your_ntfy_username"
 ntfy_password: "your_ntfy_password"
+
+# headscale-ui credentials
+# Used for HTTP basic authentication via Caddy
+# Provide either:
+# - headscale_ui_password: plain-text password (will be hashed automatically)
+# - headscale_ui_password_hash: pre-hashed bcrypt password (more secure, use caddy hash-password to generate)
+
+headscale_ui_username: "admin"
+headscale_ui_password: "your_secure_password_here"
+# headscale_ui_password_hash: "$2a$14$..."  # Optional: pre-hashed password
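The hash alternative exists because the plain-text path leaves a recoverable secret in the vars file. `caddy hash-password` is the tool this repo points at; the sketch below does the equivalent with the third-party Python `bcrypt` package (package choice and cost factor are my assumptions, not part of this commit):

```python
# Sketch: produce a bcrypt hash suitable for headscale_ui_password_hash.
# Assumes `pip install bcrypt`; `caddy hash-password` produces the same kind of hash.
import bcrypt

password = b"your_secure_password_here"  # placeholder value from the example above
hashed = bcrypt.hashpw(password, bcrypt.gensalt(rounds=14)).decode()
print(hashed)                                     # e.g. "$2b$14$..."
assert bcrypt.checkpw(password, hashed.encode())  # round-trip check
```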
@@ -1,6 +1,3 @@
# Infrastructure Variables
# Generated by setup_layer_0.sh

new_user: counterweight
ssh_port: 22
allow_ssh_from: "any"
ansible/services/headscale/deploy_headscale_ui_playbook.yml (new file, 142 lines)
@@ -0,0 +1,142 @@
+- name: Deploy headscale-ui with Docker and configure Caddy reverse proxy
+  hosts: spacey
+  become: yes
+  vars_files:
+    - ../../infra_vars.yml
+    - ../../services_config.yml
+    - ../../infra_secrets.yml
+    - ./headscale_vars.yml
+  vars:
+    headscale_subdomain: "{{ subdomains.headscale }}"
+    # caddy_sites_dir comes from services_config.yml; re-declaring it here as
+    # "{{ caddy_sites_dir }}" would be a recursive definition, so it is not overridden.
+    headscale_domain: "{{ headscale_subdomain }}.{{ root_domain }}"
+    headscale_ui_version: "2025.08.23"
+    headscale_ui_dir: /opt/headscale-ui
+    headscale_ui_http_port: 18080
+    headscale_ui_https_port: 18443
+
+  tasks:
+    - name: Check if Docker is installed
+      command: docker --version
+      register: docker_check
+      changed_when: false
+      failed_when: false
+
+    - name: Fail if Docker is not installed
+      fail:
+        msg: "Docker is not installed. Please run the docker_playbook.yml first."
+      when: docker_check.rc != 0
+
+    - name: Ensure Docker service is running
+      systemd:
+        name: docker
+        state: started
+        enabled: yes
+
+    - name: Create headscale-ui directory
+      file:
+        path: "{{ headscale_ui_dir }}"
+        state: directory
+        owner: root
+        group: root
+        mode: '0755'
+
+    - name: Create docker-compose.yml for headscale-ui
+      copy:
+        dest: "{{ headscale_ui_dir }}/docker-compose.yml"
+        content: |
+          version: "3"
+          services:
+            headscale-ui:
+              image: ghcr.io/gurucomputing/headscale-ui:{{ headscale_ui_version }}
+              container_name: headscale-ui
+              restart: unless-stopped
+              ports:
+                - "{{ headscale_ui_http_port }}:8080"
+                - "{{ headscale_ui_https_port }}:8443"
+        owner: root
+        group: root
+        mode: '0644'
+
+    - name: Deploy headscale-ui container with docker compose
+      command: docker compose up -d
+      args:
+        chdir: "{{ headscale_ui_dir }}"
+      register: docker_compose_result
+      changed_when: "'Creating' in docker_compose_result.stdout or 'Starting' in docker_compose_result.stdout or docker_compose_result.rc != 0"
+
+    - name: Wait for headscale-ui to be ready
+      uri:
+        url: "http://localhost:{{ headscale_ui_http_port }}"
+        status_code: [200, 404]
+      register: headscale_ui_ready
+      until: headscale_ui_ready.status in [200, 404]
+      retries: 30
+      delay: 2
+      ignore_errors: yes
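The `uri` loop above polls until the container answers; a 404 counts as ready because it proves the HTTP listener is up even if the root path is unmapped. The same probe in plain Python (stdlib only; the port is the playbook's default, assumed unchanged):

```python
# Readiness probe equivalent to the `uri` task: up to 30 tries, 2 s apart.
import time
import urllib.error
import urllib.request

def wait_ready(url: str, retries: int = 30, delay: float = 2.0) -> bool:
    for _ in range(retries):
        try:
            with urllib.request.urlopen(url, timeout=5) as resp:
                if resp.status in (200, 404):
                    return True
        except urllib.error.HTTPError as e:
            if e.code == 404:   # a 404 response still proves the listener is up
                return True
        except OSError:
            pass                # connection refused: container still starting
        time.sleep(delay)
    return False

print(wait_ready("http://localhost:18080"))
```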
+    - name: Ensure Caddy sites-enabled directory exists
+      file:
+        path: "{{ caddy_sites_dir }}"
+        state: directory
+        owner: root
+        group: root
+        mode: '0755'
+
+    - name: Ensure Caddyfile includes import directive for sites-enabled
+      lineinfile:
+        path: /etc/caddy/Caddyfile
+        line: 'import sites-enabled/*'
+        insertafter: EOF
+        state: present
+        backup: yes
+
+    - name: Fail if username is not provided
+      fail:
+        msg: "headscale_ui_username must be set in infra_secrets.yml"
+      when: headscale_ui_username is not defined
+
+    - name: Fail if neither password nor password hash is provided
+      fail:
+        msg: "Either headscale_ui_password or headscale_ui_password_hash must be set in infra_secrets.yml"
+      when: headscale_ui_password is not defined and headscale_ui_password_hash is not defined
+
+    - name: Generate bcrypt hash for headscale-ui password
+      become: yes
+      command: caddy hash-password --plaintext "{{ headscale_ui_password }}"
+      register: headscale_ui_password_hash_result
+      changed_when: false
+      no_log: true
+      when: headscale_ui_password is defined and headscale_ui_password_hash is not defined
+
+    - name: Set headscale-ui password hash from generated value
+      set_fact:
+        headscale_ui_password_hash: "{{ headscale_ui_password_hash_result.stdout.strip() }}"
+      when: headscale_ui_password is defined and headscale_ui_password_hash is not defined
+
+    - name: Update headscale Caddy config to include headscale-ui /web route with authentication
+      become: yes
+      copy:
+        dest: "{{ caddy_sites_dir }}/headscale.conf"
+        content: |
+          {{ headscale_domain }} {
+              @headscale_ui {
+                  path /web*
+              }
+              handle @headscale_ui {
+                  basicauth {
+                      {{ headscale_ui_username }} {{ headscale_ui_password_hash }}
+                  }
+                  reverse_proxy http://localhost:{{ headscale_ui_http_port }}
+              }
+              # Headscale API is protected by its own API key authentication
+              # All API operations require a valid Bearer token in the Authorization header
+              reverse_proxy * http://localhost:{{ headscale_port }}
+          }
+        owner: root
+        group: root
+        mode: '0644'
+
+    - name: Reload Caddy to apply new config
+      command: systemctl reload caddy
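Once this playbook has run, the /web route should demand credentials while everything else stays on headscale's own bearer-token auth. A quick check of the basicauth gate (domain and credentials below are the example values from this diff, not real secrets):

```python
# Sketch: confirm Caddy challenges /web and accepts the configured credentials.
import base64
import urllib.error
import urllib.request

url = "https://headscale.contrapeso.xyz/web/"  # {{ headscale_domain }}/web
try:
    urllib.request.urlopen(url, timeout=10)
except urllib.error.HTTPError as e:
    print(e.code)                              # expect 401 without credentials

req = urllib.request.Request(url)
token = base64.b64encode(b"admin:your_secure_password_here").decode()
req.add_header("Authorization", f"Basic {token}")
with urllib.request.urlopen(req, timeout=10) as resp:
    print(resp.status)                         # expect 200 from headscale-ui
```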
@@ -14,6 +14,7 @@
     ntfy_emergency_app_ntfy_url: "https://{{ ntfy_service_domain }}"
     ntfy_emergency_app_ntfy_user: "{{ ntfy_username | default('') }}"
     ntfy_emergency_app_ntfy_password: "{{ ntfy_password | default('') }}"
+    uptime_kuma_api_url: "https://{{ subdomains.uptime_kuma }}.{{ root_domain }}"
 
   tasks:
     - name: Create ntfy-emergency-app directory
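The new `uptime_kuma_api_url` follows the same subdomain-plus-root-domain convention as the other derived URLs. Spelled out with the example values visible elsewhere in this commit:

```python
# How the derived URL is assembled (values taken from the configs in this diff).
subdomains = {"uptime_kuma": "uptime", "ntfy_emergency_app": "avisame"}
root_domain = "contrapeso.xyz"

uptime_kuma_api_url = f"https://{subdomains['uptime_kuma']}.{root_domain}"
print(uptime_kuma_api_url)  # https://uptime.contrapeso.xyz
```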
@@ -77,3 +78,113 @@
 
     - name: Reload Caddy to apply new config
       command: systemctl reload caddy
+
+    - name: Create Uptime Kuma monitor setup script for ntfy-emergency-app
+      delegate_to: localhost
+      become: no
+      copy:
+        dest: /tmp/setup_ntfy_emergency_app_monitor.py
+        content: |
+          #!/usr/bin/env python3
+          import sys
+          import traceback
+          import yaml
+          from uptime_kuma_api import UptimeKumaApi, MonitorType
+
+          try:
+              # Load configs
+              with open('/tmp/ansible_config.yml', 'r') as f:
+                  config = yaml.safe_load(f)
+
+              url = config['uptime_kuma_url']
+              username = config['username']
+              password = config['password']
+              monitor_url = config['monitor_url']
+              monitor_name = config['monitor_name']
+
+              # Connect to Uptime Kuma
+              api = UptimeKumaApi(url, timeout=30)
+              api.login(username, password)
+
+              # Get all monitors
+              monitors = api.get_monitors()
+
+              # Find or create "services" group
+              group = next((m for m in monitors if m.get('name') == 'services' and m.get('type') == 'group'), None)
+              if not group:
+                  group_result = api.add_monitor(type='group', name='services')
+                  # Refresh to get the group with id
+                  monitors = api.get_monitors()
+                  group = next((m for m in monitors if m.get('name') == 'services' and m.get('type') == 'group'), None)
+
+              # Check if monitor already exists
+              existing_monitor = None
+              for monitor in monitors:
+                  if monitor.get('name') == monitor_name:
+                      existing_monitor = monitor
+                      break
+
+              # Get ntfy notification ID
+              notifications = api.get_notifications()
+              ntfy_notification_id = None
+              for notif in notifications:
+                  if notif.get('type') == 'ntfy':
+                      ntfy_notification_id = notif.get('id')
+                      break
+
+              if existing_monitor:
+                  print(f"Monitor '{monitor_name}' already exists (ID: {existing_monitor['id']})")
+                  print("Skipping - monitor already configured")
+              else:
+                  print(f"Creating monitor '{monitor_name}'...")
+                  api.add_monitor(
+                      type=MonitorType.HTTP,
+                      name=monitor_name,
+                      url=monitor_url,
+                      parent=group['id'],
+                      interval=60,
+                      maxretries=3,
+                      retryInterval=60,
+                      notificationIDList={ntfy_notification_id: True} if ntfy_notification_id else {}
+                  )
+
+              api.disconnect()
+              print("SUCCESS")
+
+          except Exception as e:
+              error_msg = str(e) if str(e) else repr(e)
+              print(f"ERROR: {error_msg}", file=sys.stderr)
+              traceback.print_exc(file=sys.stderr)
+              sys.exit(1)
+        mode: '0755'
+
+    - name: Create temporary config for monitor setup
+      delegate_to: localhost
+      become: no
+      copy:
+        dest: /tmp/ansible_config.yml
+        content: |
+          uptime_kuma_url: "{{ uptime_kuma_api_url }}"
+          username: "{{ uptime_kuma_username }}"
+          password: "{{ uptime_kuma_password }}"
+          monitor_url: "https://{{ ntfy_emergency_app_domain }}"
+          monitor_name: "ntfy-emergency-app"
+        mode: '0644'
+
+    - name: Run Uptime Kuma monitor setup
+      command: python3 /tmp/setup_ntfy_emergency_app_monitor.py
+      delegate_to: localhost
+      become: no
+      register: monitor_setup
+      changed_when: "'SUCCESS' in monitor_setup.stdout"
+      ignore_errors: yes
+
+    - name: Clean up temporary files
+      delegate_to: localhost
+      become: no
+      file:
+        path: "{{ item }}"
+        state: absent
+      loop:
+        - /tmp/setup_ntfy_emergency_app_monitor.py
+        - /tmp/ansible_config.yml
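The script's find-or-create step is the part worth reusing if more monitors get scripted later. Factored out as a helper (same uptime-kuma-api package the script already imports; the helper name is mine, not from this repo):

```python
# Hypothetical helper extracted from the script above.
from uptime_kuma_api import UptimeKumaApi

def find_or_create_group(api: UptimeKumaApi, name: str) -> dict | None:
    """Return the monitor group called `name`, creating it if missing."""
    def lookup():
        return next((m for m in api.get_monitors()
                     if m.get('name') == name and m.get('type') == 'group'), None)
    group = lookup()
    if group is None:
        api.add_monitor(type='group', name=name)  # same call the script makes
        group = lookup()                          # re-fetch to pick up the new id
    return group
```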
@@ -16,7 +16,7 @@ subdomains:
   lnbits: wallet
 
   # Secondary Services (on vipy)
-  ntfy_emergency_app: emergency
+  ntfy_emergency_app: avisame
   personal_blog: pablohere
 
   # Memos (on memos-box)
@@ -1,32 +0,0 @@
-# Centralized Services Configuration
-# Copy this to services_config.yml and customize
-
-# Edit these subdomains to match your preferences
-subdomains:
-  # Monitoring Services (on watchtower)
-  ntfy: ntfy
-  uptime_kuma: uptime
-
-  # VPN Infrastructure (on spacey)
-  headscale: headscale
-
-  # Core Services (on vipy)
-  vaultwarden: vault
-  forgejo: git
-  lnbits: lnbits
-
-  # Secondary Services (on vipy)
-  ntfy_emergency_app: emergency
-
-  # Memos (on memos-box)
-  memos: memos
-
-# Caddy configuration
-caddy_sites_dir: /etc/caddy/sites-enabled
-
-# Service-specific settings shared across playbooks
-service_settings:
-  ntfy:
-    topic: alerts
-  headscale:
-    namespace: counter-net