redirect fulcrum

This commit is contained in:
counterweight 2025-12-24 10:27:35 +01:00
parent 859cd2d8b7
commit c6795dc581
Signed by: counterweight
GPG key ID: 883EDBAA726BD96C
3 changed files with 250 additions and 2 deletions

View file

@ -458,7 +458,7 @@
# Check if bitcoind RPC is responding # Check if bitcoind RPC is responding
check_bitcoind() { check_bitcoind() {
local response local response
response=$(curl -s --max-time 5 \ response=$(curl -s --max-time 30 \
--user "${RPC_USER}:${RPC_PASSWORD}" \ --user "${RPC_USER}:${RPC_PASSWORD}" \
--data-binary '{"jsonrpc":"1.0","id":"healthcheck","method":"getblockchaininfo","params":[]}' \ --data-binary '{"jsonrpc":"1.0","id":"healthcheck","method":"getblockchaininfo","params":[]}' \
--header 'Content-Type: application/json' \ --header 'Content-Type: application/json' \

View file

@ -72,6 +72,44 @@
group: "{{ fulcrum_group }}" group: "{{ fulcrum_group }}"
mode: '0755' mode: '0755'
# ===========================================
# SSL Certificate Generation
# ===========================================
- name: Check if SSL certificate already exists
  stat:
    path: "{{ fulcrum_ssl_cert_path }}"
  register: fulcrum_ssl_cert_exists
  when: fulcrum_ssl_enabled | default(false)

# Idempotent: `creates:` skips generation when the cert is already present.
- name: Generate self-signed SSL certificate for Fulcrum
  command: >
    openssl req -x509 -newkey rsa:4096
    -keyout {{ fulcrum_ssl_key_path }}
    -out {{ fulcrum_ssl_cert_path }}
    -sha256 -days {{ fulcrum_ssl_cert_days }}
    -nodes
    -subj "/C=XX/ST=Decentralized/L=Bitcoin/O=Fulcrum/OU=Electrum/CN=fulcrum.local"
  args:
    creates: "{{ fulcrum_ssl_cert_path }}"
  when: fulcrum_ssl_enabled | default(false)
  notify: Restart fulcrum

# Certificate is world-readable (it is handed to every client anyway).
- name: Set SSL certificate permissions
  file:
    path: "{{ fulcrum_ssl_cert_path }}"
    owner: "{{ fulcrum_user }}"
    group: "{{ fulcrum_group }}"
    mode: '0644'
  # BUG FIX: the original condition was "A and B or A", which Jinja
  # precedence ("and" binds tighter than "or") reduces to plain A,
  # making the cert-exists check dead code. Use the intended guard.
  when: fulcrum_ssl_enabled | default(false)

# Private key must only be readable by the Fulcrum service user.
- name: Set SSL key permissions
  file:
    path: "{{ fulcrum_ssl_key_path }}"
    owner: "{{ fulcrum_user }}"
    group: "{{ fulcrum_group }}"
    mode: '0600'
  when: fulcrum_ssl_enabled | default(false)
- name: Check if Fulcrum binary already exists - name: Check if Fulcrum binary already exists
stat: stat:
path: "{{ fulcrum_binary_path }}" path: "{{ fulcrum_binary_path }}"
@ -140,6 +178,13 @@
peering = {{ 'true' if fulcrum_peering else 'false' }} peering = {{ 'true' if fulcrum_peering else 'false' }}
zmq_allow_hashtx = {{ 'true' if fulcrum_zmq_allow_hashtx else 'false' }} zmq_allow_hashtx = {{ 'true' if fulcrum_zmq_allow_hashtx else 'false' }}
# SSL/TLS Configuration
# Emitted only when SSL is enabled; Fulcrum requires ssl/cert/key to be
# set together. Cert/key paths are generated by the SSL tasks in the playbook.
{% if fulcrum_ssl_enabled | default(false) %}
ssl = {{ fulcrum_ssl_bind }}:{{ fulcrum_ssl_port }}
cert = {{ fulcrum_ssl_cert_path }}
key = {{ fulcrum_ssl_key_path }}
{% endif %}
# Anonymize client IP addresses and TxIDs in logs # Anonymize client IP addresses and TxIDs in logs
anon_logs = {{ 'true' if fulcrum_anon_logs else 'false' }} anon_logs = {{ 'true' if fulcrum_anon_logs else 'false' }}
@ -477,3 +522,195 @@
name: fulcrum name: fulcrum
state: restarted state: restarted
# ===========================================
# Public SSL forwarding: the VPS (vipy) listens on fulcrum_ssl_port and
# proxies connections over Tailscale to the Fulcrum box using
# systemd-socket-proxyd (socket-activated, no extra daemon when idle).
# ===========================================
- name: Setup public Fulcrum SSL forwarding on vipy via systemd-socket-proxyd
  hosts: vipy
  become: true
  vars_files:
    - ../../infra_vars.yml
    - ../../services_config.yml
    - ../../infra_secrets.yml
    - ./fulcrum_vars.yml
  vars:
    uptime_kuma_api_url: "https://{{ subdomains.uptime_kuma }}.{{ root_domain }}"

  tasks:
    - name: Create Fulcrum SSL proxy socket unit
      copy:
        dest: /etc/systemd/system/fulcrum-ssl-proxy.socket
        content: |
          [Unit]
          Description=Fulcrum SSL Proxy Socket

          [Socket]
          ListenStream={{ fulcrum_ssl_port }}

          [Install]
          WantedBy=sockets.target
        owner: root
        group: root
        mode: '0644'
      notify: Restart fulcrum-ssl-proxy socket

    - name: Create Fulcrum SSL proxy service unit
      copy:
        dest: /etc/systemd/system/fulcrum-ssl-proxy.service
        content: |
          [Unit]
          Description=Fulcrum SSL Proxy to {{ fulcrum_tailscale_hostname }}
          Requires=fulcrum-ssl-proxy.socket
          After=network.target

          [Service]
          Type=notify
          ExecStart=/lib/systemd/systemd-socket-proxyd {{ fulcrum_tailscale_hostname }}:{{ fulcrum_ssl_port }}
        owner: root
        group: root
        mode: '0644'
      # BUG FIX: originally this task had no notify, so edits to the
      # .service unit were written to disk but never picked up.
      notify: Restart fulcrum-ssl-proxy socket

    - name: Reload systemd daemon
      systemd:
        daemon_reload: true

    - name: Enable and start Fulcrum SSL proxy socket
      systemd:
        name: fulcrum-ssl-proxy.socket
        enabled: true
        state: started

    - name: Allow Fulcrum SSL port through UFW
      ufw:
        rule: allow
        port: "{{ fulcrum_ssl_port | string }}"
        proto: tcp
        comment: "Fulcrum SSL public access"

    # Best-effort connectivity probe; a Tailscale hiccup should not
    # abort the rest of the play.
    - name: Verify connectivity to fulcrum-box via Tailscale
      wait_for:
        host: "{{ fulcrum_tailscale_hostname }}"
        port: "{{ fulcrum_ssl_port }}"
        timeout: 10
      ignore_errors: true

    - name: Display public endpoint
      debug:
        msg: "Fulcrum SSL public endpoint: {{ ansible_host }}:{{ fulcrum_ssl_port }}"

    # ===========================================
    # Uptime Kuma TCP Monitor for Public SSL Port
    # ===========================================
    - name: Create Uptime Kuma TCP monitor setup script for Fulcrum SSL
      delegate_to: localhost
      become: false
      copy:
        dest: /tmp/setup_fulcrum_ssl_tcp_monitor.py
        content: |
          #!/usr/bin/env python3
          import sys
          import traceback
          import yaml
          from uptime_kuma_api import UptimeKumaApi, MonitorType

          try:
              with open('/tmp/ansible_fulcrum_ssl_config.yml', 'r') as f:
                  config = yaml.safe_load(f)

              url = config['uptime_kuma_url']
              username = config['username']
              password = config['password']
              monitor_host = config['monitor_host']
              monitor_port = config['monitor_port']
              monitor_name = config['monitor_name']

              api = UptimeKumaApi(url, timeout=30)
              api.login(username, password)
              monitors = api.get_monitors()

              # Find or create "services" group
              group = next((m for m in monitors if m.get('name') == 'services' and m.get('type') == 'group'), None)
              if not group:
                  api.add_monitor(type='group', name='services')
                  monitors = api.get_monitors()
                  group = next((m for m in monitors if m.get('name') == 'services' and m.get('type') == 'group'), None)

              # Check if monitor already exists
              existing = next((m for m in monitors if m.get('name') == monitor_name), None)

              # Get ntfy notification ID
              notifications = api.get_notifications()
              ntfy_notification_id = None
              for notif in notifications:
                  if notif.get('type') == 'ntfy':
                      ntfy_notification_id = notif.get('id')
                      break

              if existing:
                  print(f"Monitor '{monitor_name}' already exists (ID: {existing['id']})")
                  print("Skipping - monitor already configured")
              else:
                  print(f"Creating TCP monitor '{monitor_name}'...")
                  api.add_monitor(
                      type=MonitorType.PORT,
                      name=monitor_name,
                      hostname=monitor_host,
                      port=monitor_port,
                      parent=group['id'],
                      interval=60,
                      maxretries=3,
                      retryInterval=60,
                      notificationIDList={ntfy_notification_id: True} if ntfy_notification_id else {}
                  )

              api.disconnect()
              print("SUCCESS")
          except Exception as e:
              print(f"ERROR: {str(e)}", file=sys.stderr)
              traceback.print_exc(file=sys.stderr)
              sys.exit(1)
        mode: '0755'

    - name: Create temporary config for TCP monitor setup
      delegate_to: localhost
      become: false
      copy:
        dest: /tmp/ansible_fulcrum_ssl_config.yml
        content: |
          uptime_kuma_url: "{{ uptime_kuma_api_url }}"
          username: "{{ uptime_kuma_username }}"
          password: "{{ uptime_kuma_password }}"
          monitor_host: "{{ ansible_host }}"
          monitor_port: {{ fulcrum_ssl_port }}
          monitor_name: "Fulcrum SSL Public"
        # SECURITY FIX: this file carries the Uptime Kuma password;
        # it was previously world-readable (0644).
        mode: '0600'

    # Monitor creation is best-effort; failures are surfaced below
    # but must not fail the deployment.
    - name: Run Uptime Kuma TCP monitor setup
      command: python3 /tmp/setup_fulcrum_ssl_tcp_monitor.py
      delegate_to: localhost
      become: false
      register: tcp_monitor_setup
      changed_when: "'SUCCESS' in tcp_monitor_setup.stdout"
      ignore_errors: true

    - name: Display TCP monitor setup output
      debug:
        msg: "{{ tcp_monitor_setup.stdout_lines }}"
      when: tcp_monitor_setup.stdout is defined

    - name: Clean up TCP monitor temporary files
      delegate_to: localhost
      become: false
      file:
        path: "{{ item }}"
        state: absent
      loop:
        - /tmp/setup_fulcrum_ssl_tcp_monitor.py
        - /tmp/ansible_fulcrum_ssl_config.yml

  handlers:
    - name: Restart fulcrum-ssl-proxy socket
      systemd:
        name: fulcrum-ssl-proxy.socket
        state: restarted
        # BUG FIX: without a daemon-reload the restarted socket/service
        # would keep running the stale unit definitions.
        daemon_reload: true

View file

@ -18,13 +18,24 @@ bitcoin_rpc_port: 8332 # Bitcoin Knots RPC port
# Network - Fulcrum server # Network - Fulcrum server
fulcrum_tcp_port: 50001 fulcrum_tcp_port: 50001
fulcrum_ssl_port: 50002
# Binding address for Fulcrum TCP/SSL server:
# - "127.0.0.1" = localhost only (use when Caddy is on the same box) # - "127.0.0.1" = localhost only (use when Caddy is on the same box)
# - "0.0.0.0" = all interfaces (use when Caddy is on a different box) # - "0.0.0.0" = all interfaces (use when Caddy is on a different box)
# - Specific IP = bind to specific network interface # - Specific IP = bind to specific network interface
fulcrum_tcp_bind: "0.0.0.0" # Default: localhost (change to "0.0.0.0" if Caddy is on different box) fulcrum_tcp_bind: "0.0.0.0" # Default: localhost (change to "0.0.0.0" if Caddy is on different box)
fulcrum_ssl_bind: "0.0.0.0" # Binding address for SSL port
# If Caddy is on a different box, set this to the IP address that Caddy will use to connect # If Caddy is on a different box, set this to the IP address that Caddy will use to connect
# SSL/TLS Configuration
# Enables the native Fulcrum SSL listener; cert/key are generated as a
# self-signed pair by the playbook if not already present.
fulcrum_ssl_enabled: true
fulcrum_ssl_cert_path: "{{ fulcrum_config_dir }}/fulcrum.crt"
fulcrum_ssl_key_path: "{{ fulcrum_config_dir }}/fulcrum.key"
fulcrum_ssl_cert_days: 3650  # 10 years validity for self-signed cert
# Port forwarding configuration (for public access via VPS)
# Tailscale MagicDNS hostname of the box running Fulcrum; the VPS
# proxies the public SSL port to this host.
fulcrum_tailscale_hostname: "fulcrum-box"
# Performance # Performance
# db_mem will be calculated as 75% of available RAM automatically in playbook # db_mem will be calculated as 75% of available RAM automatically in playbook
fulcrum_db_mem_percent: 0.75 # 75% of RAM for database cache fulcrum_db_mem_percent: 0.75 # 75% of RAM for database cache