diff --git a/02_vps_core_services_setup.md b/02_vps_core_services_setup.md index 75c2ab3..a005caa 100644 --- a/02_vps_core_services_setup.md +++ b/02_vps_core_services_setup.md @@ -222,3 +222,44 @@ Personal blog is a static website served directly by Caddy. * Simply push changes to the `master` branch of your git repository. * The cron job will automatically pull and deploy updates within an hour. * For immediate updates, you can manually run: `/usr/local/bin/update-personal-blog.sh` on the server. + + +## Headscale + +Headscale is a self-hosted Tailscale control server that allows you to create your own Tailscale network. + +### Deploy + +* Decide what subdomain you want to serve Headscale on and add it to `services/headscale/headscale_vars.yml` on the `headscale_subdomain`. + * Note that you will have to add a DNS entry to point to the VPS public IP. +* Run the deployment playbook: `ansible-playbook -i inventory.ini services/headscale/deploy_headscale_playbook.yml`. + +### Configure + +* **Network Security**: The network starts with a deny-all policy - no devices can communicate with each other until you explicitly configure ACL rules in `/etc/headscale/acl.json`. +* After deployment, you need to create a namespace and generate pre-auth keys for your devices. +* SSH into your VPS and run the following commands: + ```bash + # Create a namespace + headscale user create counter-net + + # Generate a pre-auth key for device registration + headscale preauthkeys create --user 1 # Assumes you've only created one user + ``` +* Copy the generated pre-auth key - you'll need it to register your devices. + +### Connect devices + +* Install Tailscale on your devices (mobile apps, desktop clients, etc.). 
+* Instead of using the default Tailscale login, use your headscale server: + * Server URL: `https://headscale.contrapeso.xyz` (or your configured domain) + * Use the pre-auth key you generated above + * Full command: `tailscale up --login-server <https://your-headscale-domain> --authkey <pre-auth-key>` +* Your devices will now be part of your private Tailscale network. + +### Management + +* List connected devices: `headscale nodes list` +* View users: `headscale users list` +* Generate new pre-auth keys: `headscale preauthkeys create --user 1 --reusable` +* Remove a device: `headscale nodes delete --identifier <node-id>` diff --git a/ansible/services/headscale/deploy_headscale_playbook.yml b/ansible/services/headscale/deploy_headscale_playbook.yml new file mode 100644 index 0000000..1293043 --- /dev/null +++ b/ansible/services/headscale/deploy_headscale_playbook.yml @@ -0,0 +1,228 @@ +- name: Deploy headscale and configure Caddy reverse proxy + hosts: vipy + become: no + vars_files: + - ../../infra_vars.yml + - ./headscale_vars.yml + vars: + headscale_domain: "{{ headscale_subdomain }}.{{ root_domain }}" + + tasks: + - name: Install required packages + become: yes + apt: + name: + - wget + - gnupg + state: present + update_cache: yes + + - name: Download headscale DEB package + get_url: + url: "https://github.com/juanfont/headscale/releases/download/v{{ headscale_version }}/headscale_{{ headscale_version }}_linux_amd64.deb" + dest: /tmp/headscale.deb + mode: '0644' + + - name: Install headscale package + become: yes + apt: + deb: /tmp/headscale.deb + state: present + + - name: Remove temporary DEB file + file: + path: /tmp/headscale.deb + state: absent + + - name: Create headscale data directory + become: yes + file: + path: /var/lib/headscale + state: directory + owner: headscale + group: headscale + mode: '0750' + + - name: Create headscale run directory + become: yes + file: + path: /var/run/headscale + state: directory + owner: headscale + group: headscale + mode: '0755' + + - name: Ensure headscale 
user exists + become: yes + user: + name: headscale + system: yes + shell: /usr/sbin/nologin + home: /var/lib/headscale + create_home: yes + state: present + + - name: Ensure headscale user owns data directory + become: yes + file: + path: /var/lib/headscale + owner: headscale + group: headscale + recurse: yes + + - name: Create ACL policies file + become: yes + copy: + dest: /etc/headscale/acl.json + content: | + { + "ACLs": [], + "Groups": {}, + "Hosts": {}, + "TagOwners": {}, + "Tests": [] + } + owner: headscale + group: headscale + mode: '0644' + notify: Restart headscale + + - name: Deploy headscale configuration file + become: yes + copy: + dest: /etc/headscale/config.yaml + content: | + server_url: https://{{ headscale_domain }} + listen_addr: 0.0.0.0:{{ headscale_port }} + + grpc_listen_addr: 0.0.0.0:{{ headscale_grpc_port }} + grpc_allow_insecure: false + + private_key_path: /var/lib/headscale/private.key + noise: + private_key_path: /var/lib/headscale/noise_private.key + + prefixes: + v4: 100.64.0.0/10 + v6: fd7a:115c:a1e0::/48 + + derp: + server: + enabled: true + region_id: 999 + region_code: "headscale" + region_name: "Headscale Embedded DERP" + verify_clients: true + stun_listen_addr: "0.0.0.0:3478" + private_key_path: /var/lib/headscale/derp_server_private.key + automatically_add_embedded_derp_region: true + urls: + - https://controlplane.tailscale.com/derpmap/default + + database: + type: sqlite3 + sqlite: + path: /var/lib/headscale/db.sqlite + + unix_socket: /var/run/headscale/headscale.sock + unix_socket_permission: "0770" + + log: + level: info + format: text + + policy: + path: /etc/headscale/acl.json + + dns: + base_domain: tailnet.contrapeso.xyz + magic_dns: true + search_domains: + - tailnet.contrapeso.xyz + nameservers: + global: + - 1.1.1.1 + - 1.0.0.1 + owner: root + group: root + mode: '0644' + notify: Restart headscale + + - name: Test headscale configuration + become: yes + command: headscale configtest + register: headscale_config_test 
+ failed_when: headscale_config_test.rc != 0 + + - name: Display headscale config test results + debug: + msg: "{{ headscale_config_test.stdout }}" + + - name: Enable and start headscale service + become: yes + systemd: + name: headscale + enabled: yes + state: started + + - name: Allow HTTPS through UFW + become: yes + ufw: + rule: allow + port: '443' + proto: tcp + + - name: Allow HTTP through UFW (for Let's Encrypt) + become: yes + ufw: + rule: allow + port: '80' + proto: tcp + + - name: Allow STUN through UFW (for DERP server) + become: yes + ufw: + rule: allow + port: '3478' + proto: udp + + - name: Ensure Caddy sites-enabled directory exists + become: yes + file: + path: "{{ caddy_sites_dir }}" + state: directory + owner: root + group: root + mode: '0755' + + - name: Ensure Caddyfile includes import directive for sites-enabled + become: yes + lineinfile: + path: /etc/caddy/Caddyfile + line: 'import sites-enabled/*' + insertafter: EOF + state: present + backup: yes + + - name: Create Caddy reverse proxy configuration for headscale + become: yes + copy: + dest: "{{ caddy_sites_dir }}/headscale.conf" + content: | + {{ headscale_domain }} { + reverse_proxy localhost:{{ headscale_port }} + } + owner: root + group: root + mode: '0644' + + - name: Reload Caddy to apply new config + become: yes + command: systemctl reload caddy + + handlers: + - name: Restart headscale + become: yes + systemd: + name: headscale + state: restarted diff --git a/ansible/services/headscale/headscale_vars.yml b/ansible/services/headscale/headscale_vars.yml new file mode 100644 index 0000000..5dff982 --- /dev/null +++ b/ansible/services/headscale/headscale_vars.yml @@ -0,0 +1,22 @@ +# Headscale service configuration +headscale_subdomain: headscale +headscale_port: 8080 +headscale_grpc_port: 50443 + +# Version +headscale_version: "0.26.1" + +# Caddy +caddy_sites_dir: /etc/caddy/sites-enabled + +# Data directory +headscale_data_dir: /var/lib/headscale + +# Remote access +remote_host: "{{ 
groups['vipy'][0] }}" +remote_user: "{{ hostvars[remote_host]['ansible_user'] }}" +remote_key_file: "{{ hostvars[remote_host]['ansible_ssh_private_key_file'] | default('') }}" + +# Local backup +local_backup_dir: "{{ lookup('env', 'HOME') }}/headscale-backups" +backup_script_path: "{{ lookup('env', 'HOME') }}/.local/bin/headscale_backup.sh" diff --git a/ansible/services/headscale/setup_backup_headscale_to_lapy.yml b/ansible/services/headscale/setup_backup_headscale_to_lapy.yml new file mode 100644 index 0000000..6a9136a --- /dev/null +++ b/ansible/services/headscale/setup_backup_headscale_to_lapy.yml @@ -0,0 +1,75 @@ +- name: Configure local backup for Headscale from remote + hosts: lapy + gather_facts: no + vars_files: + - ../../infra_vars.yml + - ./headscale_vars.yml + vars: + remote_data_path: "{{ headscale_data_dir }}" + remote_config_path: "/etc/headscale" + + tasks: + - name: Debug remote backup vars + debug: + msg: + - "remote_host={{ remote_host }}" + - "remote_user={{ remote_user }}" + - "remote_data_path='{{ remote_data_path }}'" + - "remote_config_path='{{ remote_config_path }}'" + - "local_backup_dir={{ local_backup_dir }}" + + - name: Ensure local backup directory exists + file: + path: "{{ local_backup_dir }}" + state: directory + mode: '0755' + + - name: Ensure ~/.local/bin exists + file: + path: "{{ lookup('env', 'HOME') }}/.local/bin" + state: directory + mode: '0755' + + - name: Create backup script + copy: + dest: "{{ backup_script_path }}" + mode: '0750' + content: | + #!/bin/bash + set -euo pipefail + + TIMESTAMP=$(date +'%Y-%m-%d') + BACKUP_DIR="{{ local_backup_dir }}/$TIMESTAMP" + mkdir -p "$BACKUP_DIR" + + {% if remote_key_file %} + SSH_CMD="ssh -i {{ remote_key_file }} -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}" + {% else %} + SSH_CMD="ssh -p {{ hostvars[remote_host]['ansible_port'] | default(22) }}" + {% endif %} + + # Stop headscale service for consistent backup + $SSH_CMD {{ remote_user }}@{{ remote_host }} "sudo 
systemctl stop headscale" && trap '$SSH_CMD {{ remote_user }}@{{ remote_host }} "sudo systemctl start headscale"' EXIT + + # Backup data directory + rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_data_path }}/ "$BACKUP_DIR/data/" + + # Backup config directory + rsync -az -e "$SSH_CMD" --delete {{ remote_user }}@{{ remote_host }}:{{ remote_config_path }}/ "$BACKUP_DIR/config/" + + # Start headscale service again + $SSH_CMD {{ remote_user }}@{{ remote_host }} "sudo systemctl start headscale" + + # Rotate old backups (keep 14 days) + find "{{ local_backup_dir }}" -maxdepth 1 -type d -name '20*' -mtime +13 -exec rm -rf {} \; + + - name: Ensure cronjob for backup exists + cron: + name: "Headscale backup" + user: "{{ lookup('env', 'USER') }}" + job: "{{ backup_script_path }}" + minute: 5 + hour: "9,12,15,18" + + - name: Run the backup script to make the first backup + command: "{{ backup_script_path }}"