---
# Stack-management manifest: env defaults injected into docker compose,
# plus named operator scripts (export/import/maintenance helpers).
name: "docker.compose"
version: "1.0.0"
description: "Valknar's Stacks"
author: "valknar@pivoine.art"
license: "private"

envs:
  default:
    # Common
    ADMIN_EMAIL: valknar@pivoine.art
    NETWORK_NAME: falcon_network
    TIMEZONE: Europe/Berlin
    EMAIL_FROM: hi@pivoine.art

    # Core
    CORE_COMPOSE_PROJECT_NAME: core
    CORE_DB_HOST: postgres
    CORE_DB_PORT: 5432
    CORE_REDIS_HOST: redis
    CORE_REDIS_PORT: 6379
    CORE_REDIS_IMAGE: redis:7-alpine
    CORE_POSTGRES_IMAGE: postgres:16-alpine

    # Backrest (Restic)
    # Boolean-ish env values are quoted so every consumer receives the literal
    # string "true"/"false" rather than a parser-dependent YAML boolean.
    CORE_RESTIC_TRAEFIK_ENABLED: "true"
    CORE_RESTIC_IMAGE: garethgeorge/backrest:latest
    CORE_RESTIC_TRAEFIK_HOST: restic.pivoine.art
    CORE_RESTIC_HOSTNAME: falcon
    CORE_RESTIC_BACKUP_PATH: /mnt/hidrive/users/valknar/Backup

    # Sexy
    SEXY_TRAEFIK_ENABLED: "true"
    SEXY_COMPOSE_PROJECT_NAME: sexy
    SEXY_TRAEFIK_HOST: sexy.pivoine.art
    SEXY_DIRECTUS_IMAGE: directus/directus:11.12.0
    SEXY_FRONTEND_IMAGE: ghcr.io/valknarxxx/sexy:latest
    SEXY_DB_NAME: directus
    SEXY_PUBLIC_URL: https://sexy.pivoine.art/api
    SEXY_CORS_ORIGIN: https://sexy.pivoine.art
    SEXY_SESSION_COOKIE_DOMAIN: sexy.pivoine.art
    SEXY_CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC: https://sexy.pivoine.art
    SEXY_USER_REGISTER_URL_ALLOW_LIST: https://sexy.pivoine.art/signup/verify
    SEXY_PASSWORD_RESET_URL_ALLOW_LIST: https://sexy.pivoine.art/password/reset
    SEXY_FRONTEND_PUBLIC_API_URL: https://sexy.pivoine.art/api
    SEXY_FRONTEND_PUBLIC_URL: https://sexy.pivoine.art

    # Util Stack (Mattermost, Tandoor, Linkwarden, Vault, Joplin, PairDrop)
    UTIL_TRAEFIK_ENABLED: "true"
    UTIL_COMPOSE_PROJECT_NAME: util

    # Mattermost
    UTIL_MATTERMOST_IMAGE: mattermost/mattermost-team-edition:latest
    UTIL_MATTERMOST_TRAEFIK_HOST: mattermost.pivoine.art
    UTIL_MATTERMOST_DB_NAME: mattermost

    # Tandoor
    UTIL_TANDOOR_IMAGE: vabene1111/recipes:latest
    UTIL_TANDOOR_TRAEFIK_HOST: tandoor.pivoine.art
    UTIL_TANDOOR_DB_NAME: tandoor
    UTIL_TANDOOR_ENABLE_SIGNUP: 0
    UTIL_TANDOOR_REVERSE_PROXY_AUTH: 0
    UTIL_TANDOOR_EMAIL_USE_TLS: 0
    UTIL_TANDOOR_EMAIL_USE_SSL: 1
    UTIL_TANDOOR_GUNICORN_MEDIA: 0
    UTIL_TANDOOR_COMMENT_PREF_DEFAULT: 1
    UTIL_TANDOOR_SHOPPING_MIN_AUTOSYNC_INTERVAL: 5

    # Linkwarden
    UTIL_LINKS_DOCKER_IMAGE: ghcr.io/linkwarden/linkwarden:latest
    UTIL_LINKS_TRAEFIK_HOST: links.pivoine.art
    UTIL_LINKS_DB_NAME: linkwarden
    UTIL_LINKS_MEILI_IMAGE: getmeili/meilisearch:v1.12.8
    UTIL_LINKS_MEILI_NO_ANALYTICS: "true"

    # Vault
    UTIL_VAULT_IMAGE: vaultwarden/server:latest
    UTIL_VAULT_TRAEFIK_HOST: vault.pivoine.art
    UTIL_VAULT_WEBSOCKET_ENABLED: "true"
    UTIL_VAULT_SIGNUPS_ALLOWED: "true"
    UTIL_VAULT_INVITATIONS_ALLOWED: "true"
    UTIL_VAULT_SHOW_PASSWORD_HINT: "false"

    # Joplin
    UTIL_JOPLIN_IMAGE: joplin/server:latest
    UTIL_JOPLIN_TRAEFIK_HOST: joplin.pivoine.art
    UTIL_JOPLIN_APP_PORT: 22300
    UTIL_JOPLIN_DB_NAME: joplin

    # PairDrop
    UTIL_DROP_TRAEFIK_HOST: drop.pivoine.art

    # Filestash
    STASH_TRAEFIK_ENABLED: "true"
    STASH_COMPOSE_PROJECT_NAME: stash
    STASH_IMAGE: machines/filestash:latest
    STASH_TRAEFIK_HOST: stash.pivoine.art
    STASH_PORT: 8334
    STASH_CANARY: "true"

    # Jellyfin
    JELLY_TRAEFIK_ENABLED: "true"
    JELLY_COMPOSE_PROJECT_NAME: jelly
    JELLY_TRAEFIK_HOST: jelly.pivoine.art

    # Media Stack (Jellyfin, Filestash, Koel)
    # NOTE(review): overlaps with the standalone JELLY_* / STASH_* stacks above
    # (same images, different hosts) — presumably a migration in progress; confirm
    # which set is live before pruning.
    MEDIA_TRAEFIK_ENABLED: "true"
    MEDIA_COMPOSE_PROJECT_NAME: media
    MEDIA_JELLYFIN_IMAGE: jellyfin/jellyfin:latest
    MEDIA_JELLYFIN_TRAEFIK_HOST: jellyfin.media.pivoine.art
    MEDIA_FILESTASH_IMAGE: machines/filestash:latest
    MEDIA_FILESTASH_TRAEFIK_HOST: filestash.media.pivoine.art
    MEDIA_FILESTASH_CANARY: "true"

    # Dev (Gitea + Coolify)
    DEV_TRAEFIK_ENABLED: "true"
    DEV_COMPOSE_PROJECT_NAME: dev
    DEV_GITEA_IMAGE: gitea/gitea:latest
    DEV_GITEA_TRAEFIK_HOST: dev.pivoine.art
    DEV_GITEA_DB_NAME: gitea
    DEV_GITEA_RUNNER_IMAGE: gitea/act_runner:latest
    DEV_GITEA_RUNNER_NAME: docker-runner
    DEV_COOLIFY_IMAGE: ghcr.io/coollabsio/coolify:latest
    DEV_COOLIFY_TRAEFIK_HOST: coolify.dev.pivoine.art
    DEV_COOLIFY_DB_NAME: coolify
    DEV_N8N_IMAGE: docker.n8n.io/n8nio/n8n
    DEV_N8N_TRAEFIK_HOST: n8n.dev.pivoine.art
    DEV_N8N_DB_NAME: n8n
    DEV_N8N_DB_SCHEMA: public
    DEV_ASCIINEMA_IMAGE: ghcr.io/asciinema/asciinema-server:latest
    DEV_ASCIINEMA_TRAEFIK_HOST: asciinema.dev.pivoine.art
    DEV_ASCIINEMA_DB_NAME: asciinema
    DEV_ASCIINEMA_SIGN_UP_DISABLED: "true"

    # Net (Traefik Proxy, Netdata, Watchtower, Umami)
    NET_TRAEFIK_ENABLED: "true"
    NET_COMPOSE_PROJECT_NAME: net

    # Traefik Proxy
    NET_PROXY_DOCKER_IMAGE: traefik:latest
    NET_PROXY_TRAEFIK_HOST: proxy.pivoine.art
    NET_PROXY_PORT_HTTP: 80
    NET_PROXY_PORT_HTTPS: 443
    NET_PROXY_LOG_LEVEL: INFO

    # Netdata
    NET_NETDATA_IMAGE: netdata/netdata:latest
    NET_NETDATA_TRAEFIK_HOST: netdata.pivoine.art
    NET_NETDATA_HOSTNAME: netdata.pivoine.art

    # Umami Analytics
    NET_TRACK_DOCKER_IMAGE: ghcr.io/umami-software/umami:postgresql-latest
    NET_TRACK_TRAEFIK_HOST: umami.pivoine.art
    NET_TRACK_DB_NAME: umami

    # AI Stack
    AI_TRAEFIK_ENABLED: "true"
    AI_COMPOSE_PROJECT_NAME: ai
    AI_POSTGRES_IMAGE: pgvector/pgvector:pg16
    AI_WEBUI_IMAGE: ghcr.io/open-webui/open-webui:main
    AI_CRAWL4AI_IMAGE: unclecode/crawl4ai:latest
    AI_FACEFUSION_IMAGE: facefusion/facefusion:3.5.0-cpu
    AI_FACEFUSION_TRAEFIK_ENABLED: "true"
    AI_FACEFUSION_TRAEFIK_HOST: facefusion.ai.pivoine.art
    AI_FACEFUSION_EXECUTION_PROVIDERS: cpu
    AI_TRAEFIK_HOST: ai.pivoine.art
    AI_DB_USER: ai
    AI_DB_NAME: openwebui
    AI_WEBUI_NAME: Pivoine AI
    AI_ENABLE_SIGNUP: "true"
    AI_ENABLE_RAG_WEB_SEARCH: "true"
    AI_ENABLE_RAG_SSL_VERIFY: "true"
    AI_RAG_EMBEDDING_ENGINE: openai
    AI_RAG_EMBEDDING_MODEL: text-embedding-3-small
    AI_VECTOR_DB: pgvector
    AI_CRAWL4AI_PORT: 11235
    # NOTE(review): an OpenAI-compatible base URL pointing at api.anthropic.com
    # only works through a compatibility proxy/gateway — confirm this is routed
    # (e.g. via the LiteLLM host below) rather than a copy-paste slip.
    AI_OPENAI_API_BASE_URLS: https://api.anthropic.com/v1
    AI_LITELLM_TRAEFIK_HOST: llm.ai.pivoine.art

    # Watchtower
    WATCHTOWER_POLL_INTERVAL: 300
    WATCHTOWER_LABEL_ENABLE: "true"
    WATCHTOWER_CLEANUP: "true"
    WATCHTOWER_INCLUDE_STOPPED: "false"
    WATCHTOWER_INCLUDE_RESTARTING: "true"
    WATCHTOWER_RUN_ONCE: "false"
    WATCHTOWER_LOG_LEVEL: info
    WATCHTOWER_ROLLING_RESTART: "false"
    WATCHTOWER_NOTIFICATIONS: shoutrrr

scripts:
  # Thin docker compose wrappers
  config: docker compose config
  up: docker compose up -d
  down: docker compose down
  logs: docker compose logs -f
  restart: docker compose restart
  ps: docker compose ps
  pull: docker compose pull
  run: docker compose exec -it

  # Push local .env to the VPS project directory
  env/sync: rsync -avzhe ssh ./.env root@vps:~/Projects/docker-compose/

  # Database export scripts for sexy.pivoine.art
  # Export PostgreSQL database schema and data with DROP IF EXISTS statements
  sexy/db/export: |
    docker exec core_postgres pg_dump \
      -U sexy \
      -d sexy \
      --no-owner \
      --no-acl \
      --clean \
      --if-exists \
      > ~/Projects/docker-compose/sexy/directus.sql &&
    echo "Database exported to ~/Projects/docker-compose/sexy/directus.sql"

  # Export Directus schema.yaml (collections, fields, relations, etc.)
  sexy/schema/export: |
    docker exec sexy_api mkdir -p /directus/snapshots &&
    docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml &&
    docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml &&
    echo "Directus schema exported to ~/Projects/docker-compose/sexy/schema.yaml"

  # Combined export: both database and schema
  sexy/export/all: |
    echo "Exporting database..." &&
    docker exec core_postgres pg_dump \
      -U sexy \
      -d sexy \
      --no-owner \
      --no-acl \
      --clean \
      --if-exists \
      > ~/Projects/docker-compose/sexy/directus.sql &&
    echo "✓ Database exported" &&
    echo "Exporting Directus schema..." &&
    docker exec sexy_api mkdir -p /directus/snapshots &&
    docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml &&
    docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml &&
    echo "✓ Directus schema exported" &&
    echo "All exports completed successfully!"

  # Import PostgreSQL database from SQL dump
  # WARNING: This will DROP existing tables if they exist (uses --clean --if-exists)
  # NOTE(review): sexy/db/export dumps with `-U sexy -d sexy`, but this import
  # restores with `-U valknar -d directus` — confirm which user/database pair is
  # correct and align both scripts.
  sexy/db/import: |
    echo "⚠️ WARNING: This will replace the current database!" &&
    echo "Make sure core_postgres container is running..." &&
    docker exec -i core_postgres psql -U valknar -d directus < ~/Projects/docker-compose/sexy/directus.sql &&
    echo "✓ Database imported from ~/Projects/docker-compose/sexy/directus.sql" &&
    echo "Restarting Directus API..." &&
    docker restart sexy_api &&
    echo "✓ Import completed successfully!"

  # Apply Directus schema snapshot (collections, fields, relations)
  # This uses 'directus schema apply' which is non-destructive by default
  sexy/schema/import: |
    echo "Applying Directus schema snapshot..." &&
    docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml &&
    docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml &&
    echo "✓ Directus schema applied from ~/Projects/docker-compose/sexy/schema.yaml" &&
    echo "Restarting Directus API..." &&
    docker restart sexy_api &&
    echo "✓ Schema import completed successfully!"

  # Combined import: database and schema (use with caution!)
  # Step 1: Import database (drops/recreates all tables)
  # Step 2: Apply schema (updates Directus metadata)
  # NOTE(review): ${DB_USER} is not defined under envs.default (only AI_DB_USER
  # exists) — verify the runner injects it from elsewhere, otherwise psql runs
  # with an empty user.
  sexy/import/all: |
    echo "⚠️ WARNING: This will completely replace the database and schema!" &&
    echo "Importing database..." &&
    docker exec -i core_postgres psql -U ${DB_USER} -d ${SEXY_DB_NAME} < ~/Projects/docker-compose/sexy/directus.sql &&
    echo "✓ Database imported" &&
    echo "Applying Directus schema..." &&
    docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml &&
    docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml &&
    echo "✓ Schema applied" &&
    echo "Restarting Directus API..." &&
    docker restart sexy_api &&
    echo "✓✓✓ Complete import finished successfully!"

  # Mirror Directus uploads out of / back into the container
  sexy/uploads/export: |
    rm -rf sexy/uploads &&
    docker cp sexy_api:/directus/uploads sexy/uploads
  sexy/uploads/import: |
    docker cp sexy/uploads sexy_api:/directus

  # Update Directus extension bundle from latest sexy_frontend image
  sexy/bundle/update: |
    docker run --rm -v core_directus_bundle:/target ${SEXY_FRONTEND_IMAGE} sh -c 'cp -r /home/node/app/packages/bundle/dist/* /target/dist/' &&
    echo "✓ Bundle updated from ${SEXY_FRONTEND_IMAGE}" &&
    docker restart sexy_api &&
    echo "✓ Directus API restarted"

  # One-time creation of the shared Traefik network
  net/create: docker network create "$NETWORK_NAME"