Removed the default SCRAPY_AUTH_USERS value from arty.yml so that credentials are read only from the .env file (secrets). This fixes login failures caused by the default placeholder value being used instead of the actual credentials from .env. 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
---
# Arty project manifest for Valknar's Docker Compose stacks.
# Non-secret defaults live in envs.default below; secrets come from .env only.
name: "docker.compose"
version: "1.0.0"
description: "Valknar's Stacks"
author: "valknar@pivoine.art"
license: "private"
# Default (non-secret) environment values shared by every stack.
# Credentials and other secrets are intentionally NOT listed here --
# they are read from the .env file only.
envs:
  default:
    # Common
    ADMIN_EMAIL: valknar@pivoine.art
    NETWORK_NAME: falcon_network
    TIMEZONE: Europe/Berlin

    # Core (shared Postgres + Redis)
    CORE_COMPOSE_PROJECT_NAME: core
    CORE_DB_HOST: postgres
    CORE_DB_PORT: 5432
    CORE_REDIS_HOST: redis
    CORE_REDIS_PORT: 6379
    CORE_REDIS_IMAGE: redis:7-alpine
    CORE_POSTGRES_IMAGE: postgres:16-alpine

    # VPN (wg-easy)
    VPN_TRAEFIK_ENABLED: true
    VPN_COMPOSE_PROJECT_NAME: vpn
    VPN_DOCKER_IMAGE: ghcr.io/wg-easy/wg-easy:latest
    VPN_TRAEFIK_HOST: vpn.pivoine.art

    # Track (Umami analytics)
    TRACK_TRAEFIK_ENABLED: true
    TRACK_COMPOSE_PROJECT_NAME: track
    TRACK_DOCKER_IMAGE: ghcr.io/umami-software/umami:postgresql-latest
    TRACK_TRAEFIK_HOST: umami.pivoine.art
    TRACK_DB_NAME: umami

    # AWSM
    AWESOME_TRAEFIK_ENABLED: true
    AWESOME_COMPOSE_PROJECT_NAME: awesome
    AWESOME_TRAEFIK_HOST: awesome.pivoine.art

    # Sexy (Directus API + frontend)
    SEXY_TRAEFIK_ENABLED: true
    SEXY_COMPOSE_PROJECT_NAME: sexy
    SEXY_TRAEFIK_HOST: sexy.pivoine.art
    SEXY_DIRECTUS_IMAGE: directus/directus:11.12.0
    SEXY_FRONTEND_IMAGE: ghcr.io/valknarxxx/sexy:latest
    SEXY_DB_NAME: directus
    SEXY_PUBLIC_URL: https://sexy.pivoine.art/api
    SEXY_CORS_ORIGIN: https://sexy.pivoine.art
    SEXY_SESSION_COOKIE_DOMAIN: sexy.pivoine.art
    SEXY_CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC: https://sexy.pivoine.art
    SEXY_USER_REGISTER_URL_ALLOW_LIST: https://sexy.pivoine.art/signup/verify
    SEXY_PASSWORD_RESET_URL_ALLOW_LIST: https://sexy.pivoine.art/password/reset
    SEXY_FRONTEND_PUBLIC_API_URL: https://sexy.pivoine.art/api
    SEXY_FRONTEND_PUBLIC_URL: https://sexy.pivoine.art

    # Gotify (push notifications)
    GOTIFY_TRAEFIK_ENABLED: true
    GOTIFY_COMPOSE_PROJECT_NAME: messaging
    GOTIFY_IMAGE: gotify/server:latest
    GOTIFY_TRAEFIK_HOST: gotify.pivoine.art

    # Scrapy (scrapyd + scrapyrt)
    # NOTE: SCRAPY_AUTH_USERS is deliberately absent -- credentials must
    # come from .env so a placeholder default can never shadow them.
    SCRAPY_TRAEFIK_ENABLED: true
    SCRAPY_COMPOSE_PROJECT_NAME: scrapy
    SCRAPY_SCRAPYD_IMAGE: vimagick/scrapyd
    SCRAPY_IMAGE: vimagick/scrapyd
    SCRAPY_SCRAPYRT_IMAGE: vimagick/scrapyd
    SCRAPY_TRAEFIK_HOST: scrapy.pivoine.art
    SCRAPY_SCRAPYD_PORT: 6800
    SCRAPY_SCRAPYRT_PORT: 9080

    # n8n (workflow automation)
    N8N_TRAEFIK_ENABLED: true
    N8N_COMPOSE_PROJECT_NAME: n8n
    N8N_IMAGE: docker.n8n.io/n8nio/n8n
    N8N_TRAEFIK_HOST: n8n.pivoine.art
    N8N_PORT: 5678
    N8N_DB_NAME: n8n
    N8N_DB_SCHEMA: public

    # Filestash
    STASH_TRAEFIK_ENABLED: true
    STASH_COMPOSE_PROJECT_NAME: stash
    STASH_IMAGE: machines/filestash:latest
    STASH_TRAEFIK_HOST: stash.pivoine.art
    STASH_PORT: 8334
    STASH_CANARY: true

    # Proxy (Traefik)
    PROXY_COMPOSE_PROJECT_NAME: proxy
    PROXY_DOCKER_IMAGE: traefik:latest

    # Watchtower (automatic container updates)
    WATCHTOWER_POLL_INTERVAL: 300
    WATCHTOWER_LABEL_ENABLE: true
    WATCHTOWER_CLEANUP: true
    WATCHTOWER_INCLUDE_STOPPED: false
    WATCHTOWER_INCLUDE_RESTARTING: true
    WATCHTOWER_RUN_ONCE: false
    WATCHTOWER_LOG_LEVEL: info
    WATCHTOWER_ROLLING_RESTART: false
# Shorthand scripts, run via arty against the current stack directory.
scripts:
  config: docker compose config
  up: docker compose up -d
  down: docker compose down
  logs: docker compose logs -f
  restart: docker compose restart
  ps: docker compose ps
  pull: docker compose pull
  run: docker compose exec -it
  # Push the local .env (secrets) to the VPS over SSH.
  env/sync: rsync -avzhe ssh ./.env root@vps:~/Projects/docker-compose/
# Database export scripts for sexy.pivoine.art
|
|
# Export PostgreSQL database schema and data with DROP IF EXISTS statements
|
|
sexy/db/export: |
|
|
docker exec core_postgres pg_dump \
|
|
-U sexy \
|
|
-d sexy \
|
|
--no-owner \
|
|
--no-acl \
|
|
--clean \
|
|
--if-exists \
|
|
> ~/Projects/docker-compose/sexy/directus.sql &&
|
|
echo "Database exported to ~/Projects/docker-compose/sexy/directus.sql"
|
|
|
|
# Export Directus schema.yaml (collections, fields, relations, etc.)
|
|
sexy/schema/export: |
|
|
docker exec sexy_api mkdir -p /directus/snapshots &&
|
|
docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml &&
|
|
docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml &&
|
|
echo "Directus schema exported to ~/Projects/docker-compose/sexy/schema.yaml"
|
|
|
|
# Combined export: both database and schema
|
|
sexy/export/all: |
|
|
echo "Exporting database..." &&
|
|
docker exec core_postgres pg_dump \
|
|
-U sexy \
|
|
-d sexy \
|
|
--no-owner \
|
|
--no-acl \
|
|
--clean \
|
|
--if-exists \
|
|
> ~/Projects/docker-compose/sexy/directus.sql &&
|
|
echo "✓ Database exported" &&
|
|
echo "Exporting Directus schema..." &&
|
|
docker exec sexy_api mkdir -p /directus/snapshots &&
|
|
docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml &&
|
|
docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml &&
|
|
echo "✓ Directus schema exported" &&
|
|
echo "All exports completed successfully!"
|
|
|
|
# Import PostgreSQL database from SQL dump
|
|
# WARNING: This will DROP existing tables if they exist (uses --clean --if-exists)
|
|
sexy/db/import: |
|
|
echo "⚠️ WARNING: This will replace the current database!" &&
|
|
echo "Make sure core_postgres container is running..." &&
|
|
docker exec -i core_postgres psql -U valknar -d directus < ~/Projects/docker-compose/sexy/directus.sql &&
|
|
echo "✓ Database imported from ~/Projects/docker-compose/sexy/directus.sql" &&
|
|
echo "Restarting Directus API..." &&
|
|
docker restart sexy_api &&
|
|
echo "✓ Import completed successfully!"
|
|
|
|
# Apply Directus schema snapshot (collections, fields, relations)
|
|
# This uses 'directus schema apply' which is non-destructive by default
|
|
sexy/schema/import: |
|
|
echo "Applying Directus schema snapshot..." &&
|
|
docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml &&
|
|
docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml &&
|
|
echo "✓ Directus schema applied from ~/Projects/docker-compose/sexy/schema.yaml" &&
|
|
echo "Restarting Directus API..." &&
|
|
docker restart sexy_api &&
|
|
echo "✓ Schema import completed successfully!"
|
|
|
|
# Combined import: database and schema (use with caution!)
|
|
# Step 1: Import database (drops/recreates all tables)
|
|
# Step 2: Apply schema (updates Directus metadata)
|
|
sexy/import/all: |
|
|
echo "⚠️ WARNING: This will completely replace the database and schema!" &&
|
|
echo "Importing database..." &&
|
|
docker exec -i core_postgres psql -U ${DB_USER} -d ${SEXY_DB_NAME} < ~/Projects/docker-compose/sexy/directus.sql &&
|
|
echo "✓ Database imported" &&
|
|
echo "Applying Directus schema..." &&
|
|
docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml &&
|
|
docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml &&
|
|
echo "✓ Schema applied" &&
|
|
echo "Restarting Directus API..." &&
|
|
docker restart sexy_api &&
|
|
echo "✓✓✓ Complete import finished successfully!"
|
|
|
|
sexy/uploads/export: |
|
|
rm -rf sexy/uploads
|
|
docker cp "sexy_api:/directus/uploads" "sexy/uploads"
|
|
sexy/uploads/import: |
|
|
docker cp "sexy/uploads" "sexy_api:/directus"
|
|
net/create: docker network create "$NETWORK_NAME"
|
|
awsm/import: |
|
|
docker cp "/tmp/awesome-database" "awesome_app:/home/node/.awesome"
|