Fix: Update authentik Caddy template to use HTTP backend

- Change reverse_proxy from https:// to http:// backend
- Use authentik_http_port instead of authentik_https_port
- Remove unnecessary TLS transport configuration
- Remove health check for non-existent endpoint

This aligns the Ansible template with the working configuration
where authentik only serves HTTP internally and Caddy handles SSL.
commit 0507e3291d (parent b3c3fe5c56)
Date: 2025-12-04 19:45:04 +01:00
11 changed files with 11 additions and 256 deletions
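For reference, a rough sketch of the rendered Caddy site block after this change, assuming the role defaults shown in this commit (authentik_domain auth.jnss.me, authentik_bind_address 127.0.0.1, authentik_http_port 9000); the unchanged security-header and logging directives are omitted:

auth.jnss.me {
    reverse_proxy http://127.0.0.1:9000 {
        header_up Host {host}
        header_up X-Real-IP {remote_host}
        header_up X-Forwarded-Proto https
        header_up X-Forwarded-For {remote_host}
        header_up X-Forwarded-Host {host}
    }

    handle_path /outpost.goauthentik.io/* {
        reverse_proxy http://127.0.0.1:9000
    }
}

Caddy terminates TLS for the public domain and forwards plain HTTP to the locally bound authentik server, so no TLS transport or certificate trust is needed on the backend hop.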

View File

@@ -1,66 +0,0 @@
---
# Sigvild Gallery Data Backup Playbook
#
# This playbook creates a compressed backup of all Sigvild Gallery production data
# including the PocketBase database and uploaded files.
- name: Backup Sigvild Gallery Production Data
  hosts: arch-vps
  become: true
  gather_facts: true
  vars:
    # Backup configuration - can be overridden with --extra-vars
    sigvild_gallery_backup_local_path: "{{ playbook_dir }}/backups/sigvild-gallery"
  pre_tasks:
    - name: Ensure local backup directory exists
      local_action:
        module: file
        path: "{{ sigvild_gallery_backup_local_path }}"
        state: directory
        mode: '0755'
      become: false
      run_once: true
    - name: Display backup operation info
      debug:
        msg:
          - "🔄 Starting Sigvild Gallery Data Backup"
          - "Target server: {{ inventory_hostname }}"
          - "Local backup storage: {{ sigvild_gallery_backup_local_path }}"
          - "Timestamp: {{ ansible_date_time.iso8601 }}"
  tasks:
    - name: Execute backup tasks
      include_role:
        name: sigvild-gallery
        tasks_from: backup
      tags: [backup, data]
  post_tasks:
    - name: List local backups
      local_action:
        module: find
        paths: "{{ sigvild_gallery_backup_local_path }}"
        patterns: "sigvild-gallery-backup-*.tar.gz"
      register: all_backups
      become: false
    - name: Display backup summary
      debug:
        msg:
          - "✅ Backup operation completed successfully!"
          - "Total backups available: {{ all_backups.files | length }}"
          - "Latest backup: sigvild-gallery-backup-{{ ansible_date_time.iso8601_basic_short }}.tar.gz"
          - "Backup location: {{ sigvild_gallery_backup_local_path }}"
          - ""
          - "⚠️ IMPORTANT: Store this backup safely before formatting your server!"
          - "💡 To restore: Run normal deployment - restoration is automatic if backup exists"
    - name: Show backup file details
      debug:
        msg: "Backup: {{ item.path | basename }} - {{ (item.size / 1024 / 1024) | round(2) }}MB - {{ item.mtime | to_datetime('%Y-%m-%d %H:%M:%S') }}"
      loop: "{{ all_backups.files | sort(attribute='mtime') | reverse }}"
      loop_control:
        label: "{{ item.path | basename }}"

View File

@@ -1,52 +0,0 @@
---
# Sigvild Gallery Deployment Playbook
- name: Deploy Sigvild Wedding Gallery
  hosts: arch-vps
  become: yes
  gather_facts: yes
  vars:
    # Local project path - adjust as needed
    sigvild_gallery_local_project_path: "{{ ansible_env.PWD }}/../sigvild-gallery"
  pre_tasks:
    - name: Verify local sigvild-gallery project exists
      local_action:
        module: stat
        path: "{{ sigvild_gallery_local_project_path }}"
      register: project_exists
      become: no
    - name: Fail if project directory doesn't exist
      fail:
        msg: "Sigvild Gallery project not found at {{ sigvild_gallery_local_project_path }}"
      when: not project_exists.stat.exists
    - name: Display deployment information
      debug:
        msg:
          - "Deploying Sigvild Gallery from: {{ sigvild_gallery_local_project_path }}"
          - "Frontend domain: {{ sigvild_gallery_frontend_domain }}"
          - "API domain: {{ sigvild_gallery_api_domain }}"
  roles:
    - role: sigvild-gallery
      tags: ['sigvild', 'gallery', 'wedding']
  post_tasks:
    - name: Wait for API to be ready
      wait_for:
        port: "{{ sigvild_gallery_port }}"
        host: "{{ sigvild_gallery_host }}"
        timeout: 60
      tags: [verify]
    - name: Display deployment results
      debug:
        msg:
          - "✅ Sigvild Gallery deployment completed!"
          - "Frontend: https://{{ sigvild_gallery_frontend_domain }}"
          - "API: https://{{ sigvild_gallery_api_domain }}"
          - "Service status: systemctl status sigvild-gallery"
          - "Logs: journalctl -u sigvild-gallery -f"

View File

@@ -44,7 +44,7 @@ authentik_valkey_db: 1 # Use database 1 for Authentik
authentik_domain: "auth.jnss.me"
authentik_http_port: 9000
authentik_bind_address: "0.0.0.0"
authentik_bind_address: "127.0.0.1"
# =================================================================
# Authentik Core Configuration

View File

@@ -1,37 +0,0 @@
# Authentik Server Container Quadlet
# Generated by rick-infra Ansible role
[Unit]
Description=Authentik Server Container
Requires={{ authentik_pod_name }}-pod.service
After={{ authentik_pod_name }}-pod.service
[Container]
ContainerName=authentik-server
Image={{ authentik_image_server }}:{{ authentik_image_tag }}
Pod={{ authentik_pod_name }}.pod
# Environment configuration
EnvironmentFile={{ authentik_home }}/.config/containers/authentik.env
Environment=AUTHENTIK_LISTEN__HTTP=0.0.0.0:9000
Environment=AUTHENTIK_LISTEN__HTTPS=0.0.0.0:9443
# Server command
Exec=server
# Volumes for persistent data
Volume={{ authentik_home }}/data:/data:Z
Volume={{ authentik_home }}/media:/media:Z
# Health check
HealthCmd=ak healthcheck
HealthInterval=30s
HealthTimeout=10s
HealthRetries=3
[Service]
Restart=always
RestartSec=10
[Install]
WantedBy=default.target

View File

@@ -1,35 +0,0 @@
# Authentik Worker Container Quadlet
# Generated by rick-infra Ansible role
[Unit]
Description=Authentik Worker Container
Requires={{ authentik_pod_name }}-pod.service authentik-server.service
After={{ authentik_pod_name }}-pod.service authentik-server.service
[Container]
ContainerName=authentik-worker
Image={{ authentik_image_server }}:{{ authentik_image_tag }}
Pod={{ authentik_pod_name }}.pod
# Environment configuration
EnvironmentFile={{ authentik_home }}/.config/containers/authentik.env
# Worker command
Exec=worker
# Volumes for persistent data
Volume={{ authentik_home }}/data:/data:Z
Volume={{ authentik_home }}/media:/media:Z
# Health check
HealthCmd=ak healthcheck
HealthInterval=30s
HealthTimeout=10s
HealthRetries=3
[Service]
Restart=always
RestartSec=10
[Install]
WantedBy=default.target

View File

@@ -1,21 +1,11 @@
# Authentik Authentication Service
{{ authentik_domain }} {
-    reverse_proxy https://{{ authentik_bind_address }}:{{ authentik_https_port }} {
-        transport http {
-            tls_insecure_skip_verify
-        }
-        header_up Host {upstream_hostport}
+    reverse_proxy http://{{ authentik_bind_address }}:{{ authentik_http_port }} {
+        header_up Host {host}
        header_up X-Real-IP {remote_host}
        header_up X-Forwarded-Proto https
        header_up X-Forwarded-For {remote_host}
        header_up X-Forwarded-Host {host}
-        # Health check
-        health_uri /if/health/live/
-        health_timeout 10s
-        health_interval 30s
-        health_status 200
    }
    # Security headers
@@ -29,7 +19,7 @@
    # Authentik-specific paths
    handle_path /outpost.goauthentik.io/* {
-        reverse_proxy https://{{ authentik_bind_address }}:{{ authentik_https_port }}
+        reverse_proxy http://{{ authentik_bind_address }}:{{ authentik_http_port }}
    }
    # Logging
@@ -38,4 +28,4 @@
        level INFO
        format json
    }
-}
+}

View File

@@ -15,6 +15,7 @@ AUTHENTIK_CACHE__URL=unix://{{ valkey_unix_socket_path }}?db={{ authentik_valkey
AUTHENTIK_SECRET_KEY={{ authentik_secret_key }}
AUTHENTIK_LOG_LEVEL={{ authentik_log_level }}
AUTHENTIK_ERROR_REPORTING__ENABLED={{ authentik_error_reporting | lower }}
+AUTHENTIK_URL=https://{{authentik_domain}}
# Security Configuration
AUTHENTIK_COOKIE_DOMAIN={{ authentik_domain }}

View File

@@ -1,25 +0,0 @@
# Authentik Pod Quadlet
# Generated by rick-infra Ansible role
[Unit]
Description=Authentik Authentication Service Pod
Wants=network-online.target
After=network-online.target
[Pod]
PodName={{ authentik_pod_name }}
Network={{ authentik_network_name }}
# Mount Unix socket for PostgreSQL (Valkey uses TCP via host.containers.internal)
Volume=/run/postgresql:/run/postgresql:ro
# Host gateway allows access to localhost services
{% if authentik_enable_host_gateway | default(true) %}
AddHost=host.containers.internal:host-gateway
{% endif %}
# Published ports for web access
PublishPort={{ authentik_http_port }}:9000
PublishPort={{ authentik_https_port }}:9443
[Install]
WantedBy=default.target

View File

@@ -53,10 +53,6 @@ valkey_appendonly: false # RDB only for simplicity
# =================================================================
# Security Configuration
# =================================================================
# Security hardening is now built into the custom service file
# Valkey security settings
valkey_timeout: 300
valkey_tcp_keepalive: 300
valkey_tcp_backlog: 511

View File

@@ -56,23 +56,6 @@
    backup: yes
  notify: restart valkey
-- name: Create systemd override directory for Valkey security
-  file:
-    path: /etc/systemd/system/valkey.service.d
-    state: directory
-    mode: '0755'
-  when: valkey_systemd_security
-- name: Deploy Valkey systemd security override
-  template:
-    src: systemd-override.conf.j2
-    dest: /etc/systemd/system/valkey.service.d/override.conf
-    mode: '0644'
-  when: valkey_systemd_security
-  notify:
-    - reload systemd
-    - restart valkey
- name: Enable and start Valkey service
  systemd:
    name: valkey
@@ -107,7 +90,7 @@
  when: valkey_service_state == "started" and valkey_unix_socket_enabled
- name: Wait for Valkey to be ready (Unix Socket) - Try with auth if needed
-  command: redis-cli -s {{ valkey_unix_socket_path }} -a {{ valkey_requirepass }} ping
+  command: redis-cli -s {{ valkey_unix_socket_path }} -a {{ valkey_password }} ping
  register: valkey_socket_ping_auth
  until: valkey_socket_ping_auth.stdout == "PONG"
  retries: 5
@@ -120,14 +103,14 @@
    ("NOAUTH" in (valkey_socket_ping_noauth.stdout + valkey_socket_ping_noauth.stderr) or valkey_socket_ping_noauth.rc != 0)
- name: Test Valkey connectivity (TCP)
-  command: redis-cli -h {{ valkey_bind }} -p {{ valkey_port }} -a {{ valkey_requirepass }} ping
+  command: redis-cli -h {{ valkey_bind }} -p {{ valkey_port }} -a {{ valkey_password }} ping
  register: valkey_ping_result_tcp
  changed_when: false
  failed_when: valkey_ping_result_tcp.stdout != "PONG"
  when: valkey_service_state == "started" and not valkey_unix_socket_enabled
- name: Test Valkey connectivity (Unix Socket)
-  command: redis-cli -s {{ valkey_unix_socket_path }} -a {{ valkey_requirepass }} ping
+  command: redis-cli -s {{ valkey_unix_socket_path }} -a {{ valkey_password }} ping
  register: valkey_ping_result_socket
  changed_when: false
  failed_when: valkey_ping_result_socket.stdout != "PONG"

View File

@@ -40,7 +40,7 @@ tcp-keepalive {{ valkey_tcp_keepalive }}
# =================================================================
# Require password for all operations
-requirepass {{ valkey_requirepass }}
+requirepass {{ valkey_password }}
# =================================================================
# Memory Management
@@ -136,4 +136,4 @@ client-output-buffer-limit pubsub 32mb 8mb 60
# - Database 3+: Future applications
#
# Connection example:
-# redis-cli -h {{ valkey_bind }} -p {{ valkey_port }} -a {{ valkey_requirepass }} -n 1
+# redis-cli -h {{ valkey_bind }} -p {{ valkey_port }} -a {{ valkey_password }} -n 1