feat: add database and schema import scripts to arty.yml

Added three import scripts for sexy.pivoine.art database restoration:

1. sexy/db/import
   - Imports PostgreSQL database from directus.sql
   - Includes warning about destructive operation
   - Restarts Directus API after import

2. sexy/schema/import
   - Applies Directus schema snapshot from schema.yaml
   - Uses 'directus schema apply --yes'
   - Restarts Directus API after import

3. sexy/import/all
   - Combined import with interactive confirmation
   - Prompts user to type 'yes' to proceed
   - Imports database then applies schema
   - Complete restoration workflow

All scripts include comprehensive comments and safety warnings.
Complements existing export scripts (sexy/db/export, sexy/schema/export, sexy/export/all).
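
To move a snapshot between machines before importing, the dump and schema can be pushed with the same rsync pattern used by env/sync (a sketch; the 'vps' host alias and remote path are reused from the existing env/sync script, and the local source paths are assumed):

  rsync -avzhe ssh ./sexy/directus.sql ./sexy/schema.yaml root@vps:~/Projects/docker-compose/sexy/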

Usage:
  pnpm arty sexy/db/import       # Import database only
  pnpm arty sexy/schema/import   # Import schema only
  pnpm arty sexy/import/all      # Full import (with confirmation)
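
After an import, a quick sanity check can confirm the restore (a sketch that reuses the container names and credentials from the scripts below; output will vary):

  docker exec sexy_postgres psql -U sexy -d sexy -c '\dt'   # list restored tables
  docker logs --tail 20 sexy_api                            # confirm Directus restarted cleanly

When in doubt about a snapshot, the Directus CLI's schema apply also supports a --dry-run flag to preview the changes without writing them.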

commit 960056cdf9 (parent 36f48de7ad), 2025-10-28 22:12:25 +01:00

@@ -67,22 +67,92 @@ scripts:
pull: docker compose pull
run: docker compose exec -it
env/sync: rsync -avzhe ssh ./.env root@vps:~/Projects/docker-compose/
db/dump: |
PGPASSWORD="${DB_PASSWORD}" pg_dump -h "${CORE_DB_HOST}" -p "${CORE_DB_PORT}" -U "${DB_USER}" -d "${SEXY_DB_NAME}" > sexy/directus.sql; \
docker exec -it "sexy_api" npx directus schema snapshot "/directus/directus.yaml"; \
docker cp "sexy/directus.yaml" "sexy_api:/directus/directus.yaml"
db/import: |
PGPASSWORD="${DB_PASSWORD}" psql -h "${CORE_DB_HOST}" -p "${CORE_DB_PORT}" -U "${DB_USER}" -d postgres -c \
"SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname = '${SEXY_DB_NAME}' AND pid <> pg_backend_pid();"; \
PGPASSWORD="${DB_PASSWORD}" psql -h "${CORE_DB_HOST}" -p "${CORE_DB_PORT}" -U "${DB_USER}" -d postgres -c "DROP DATABASE IF EXISTS ${SEXY_DB_NAME};"; \
PGPASSWORD="${DB_PASSWORD}" psql -h "${CORE_DB_HOST}" -p "${CORE_DB_PORT}" -U "${DB_USER}" -d postgres -c "CREATE DATABASE ${SEXY_DB_NAME};"; \
PGPASSWORD="${DB_PASSWORD}" psql -h "${CORE_DB_HOST}" -p "${CORE_DB_PORT}" -U "${DB_USER}" -d "${SEXY_DB_NAME}" -f "sexy/directus.sql"; \
docker cp "sexy/directus.yaml" "sexy_api:/directus/directus.yaml"; \
docker exec -it "sexy_api" npx directus schema apply /directus/directus.yaml \
uploads/export: |
# Database export scripts for sexy.pivoine.art
# Export PostgreSQL database schema and data with DROP IF EXISTS statements
sexy/db/export: |
docker exec sexy_postgres pg_dump \
-U sexy \
-d sexy \
--no-owner \
--no-acl \
--clean \
--if-exists \
> ~/Projects/docker-compose/sexy/directus.sql && \
echo "Database exported to ~/Projects/docker-compose/sexy/directus.sql"
# Export Directus schema.yaml (collections, fields, relations, etc.)
sexy/schema/export: |
docker exec sexy_api mkdir -p /directus/snapshots && \
docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml && \
docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml && \
echo "Directus schema exported to ~/Projects/docker-compose/sexy/schema.yaml"
# Combined export: both database and schema
sexy/export/all: |
echo "Exporting database..." && \
docker exec sexy_postgres pg_dump \
-U sexy \
-d sexy \
--no-owner \
--no-acl \
--clean \
--if-exists \
> ~/Projects/docker-compose/sexy/directus.sql && \
echo "✓ Database exported" && \
echo "Exporting Directus schema..." && \
docker exec sexy_api mkdir -p /directus/snapshots && \
docker exec sexy_api npx directus schema snapshot /directus/snapshots/schema.yaml && \
docker cp sexy_api:/directus/snapshots/schema.yaml ~/Projects/docker-compose/sexy/schema.yaml && \
echo "✓ Directus schema exported" && \
echo "All exports completed successfully!"
# Import PostgreSQL database from SQL dump
# WARNING: This will DROP existing tables if they exist (uses --clean --if-exists)
sexy/db/import: |
echo "⚠️ WARNING: This will replace the current database!" && \
echo "Make sure sexy_postgres container is running..." && \
docker exec -i sexy_postgres psql -U sexy -d sexy < ~/Projects/docker-compose/sexy/directus.sql && \
echo "✓ Database imported from ~/Projects/docker-compose/sexy/directus.sql" && \
echo "Restarting Directus API..." && \
docker restart sexy_api && \
echo "✓ Import completed successfully!"
# Apply Directus schema snapshot (collections, fields, relations)
# This uses 'directus schema apply' which is non-destructive by default
sexy/schema/import: |
echo "Applying Directus schema snapshot..." && \
docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml && \
docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml && \
echo "✓ Directus schema applied from ~/Projects/docker-compose/sexy/schema.yaml" && \
echo "Restarting Directus API..." && \
docker restart sexy_api && \
echo "✓ Schema import completed successfully!"
# Combined import: database and schema (use with caution!)
# Step 1: Import database (drops/recreates all tables)
# Step 2: Apply schema (updates Directus metadata)
sexy/import/all: |
echo "⚠️ WARNING: This will completely replace the database and schema!" && \
read -p "Are you sure? Type 'yes' to continue: " confirm && \
if [ "$confirm" = "yes" ]; then \
echo "Importing database..." && \
docker exec -i sexy_postgres psql -U sexy -d sexy < ~/Projects/docker-compose/sexy/directus.sql && \
echo "✓ Database imported" && \
echo "Applying Directus schema..." && \
docker cp ~/Projects/docker-compose/sexy/schema.yaml sexy_api:/directus/schema.yaml && \
docker exec sexy_api npx directus schema apply --yes /directus/schema.yaml && \
echo "✓ Schema applied" && \
echo "Restarting Directus API..." && \
docker restart sexy_api && \
echo "✓✓✓ Complete import finished successfully!"; \
else \
echo "Import cancelled."; \
fi
sexy/uploads/export: |
rm -rf sexy/uploads
docker cp "sexy_api:/directus/uploads" "sexy/uploads"
uploads/import: |
sexy/uploads/import: |
docker cp "sexy/uploads" "sexy_api:/directus"
net/create: docker network create "$NETWORK_NAME"
awsm/import: |