feat: better logging

CLAUDE.md · 15 lines changed
@@ -106,19 +106,16 @@ Start production frontend server (local):
pnpm --filter @sexy.pivoine.art/frontend start
```

Docker deployment (recommended for production):
Docker Compose deployment (recommended for production):
```bash
# Build Docker image
docker build -t sexy.pivoine.art:latest .
# Local development (with Postgres, Redis, Directus)
docker-compose up -d

# Run with docker-compose
docker-compose -f docker-compose.production.yml up -d

# Or run directly
docker run -d -p 3000:3000 --env-file .env.production sexy.pivoine.art:latest
# Production (with Traefik, external DB, Redis)
docker-compose -f compose.production.yml --env-file .env.production up -d
```

See `DOCKER.md` for comprehensive Docker deployment guide.
See `COMPOSE.md` for Docker Compose guide and `DOCKER.md` for standalone Docker deployment.

## Architecture Notes

||||
COMPOSE.md · 424 lines · new file
@@ -0,0 +1,424 @@
|
||||
# Docker Compose Guide
|
||||
|
||||
This guide explains the Docker Compose setup for sexy.pivoine.art with local development and production configurations.
|
||||
|
||||
## Architecture Overview
|
||||
|
||||
The application uses a **multi-file compose setup** with two configurations:
|
||||
|
||||
1. **`compose.yml`** - Base configuration for local development
|
||||
2. **`compose.production.yml`** - Production overrides with Traefik integration
|
||||
|
||||
### Service Architecture
|
||||
|
||||
```
|
||||
┌─────────────────────────────────────────────────────────────┐
|
||||
│ 🌐 Traefik Reverse Proxy (Production Only) │
|
||||
│ ├─ HTTPS Termination │
|
||||
│ ├─ Automatic Let's Encrypt │
|
||||
│ └─ Routes traffic to frontend & Directus API │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ 💄 Frontend (SvelteKit) │
|
||||
│ ├─ Port 3000 (internal) │
|
||||
│ ├─ Serves on https://sexy.pivoine.art │
|
||||
│ └─ Proxies /api to Directus │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ 🎭 Directus CMS │
|
||||
│ ├─ Port 8055 (internal) │
|
||||
│ ├─ Serves on https://sexy.pivoine.art/api │
|
||||
│ ├─ Custom bundle extensions mounted │
|
||||
│ └─ Uploads volume │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ 🗄️ PostgreSQL (Local) / External (Production) │
|
||||
│ └─ Database for Directus │
|
||||
├─────────────────────────────────────────────────────────────┤
|
||||
│ 💾 Redis (Local) / External (Production) │
|
||||
│ └─ Cache & session storage │
|
||||
└─────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Local Development Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Docker 20.10+
|
||||
- Docker Compose 2.0+
|
||||
|
||||
### Quick Start
|
||||
|
||||
1. **Create environment file:**
|
||||
|
||||
```bash
|
||||
cp .env.example .env
|
||||
# Edit .env with your local settings (defaults work fine)
|
||||
```
|
||||
|
||||
2. **Start all services:**
|
||||
|
||||
```bash
|
||||
docker-compose up -d
|
||||
```
|
||||
|
||||
3. **Access services:**
|
||||
- Frontend: http://localhost:3000 (if enabled)
|
||||
- Directus: http://localhost:8055
|
||||
- Directus Admin: http://localhost:8055/admin
|
||||
|
||||
4. **View logs:**
|
||||
|
||||
```bash
|
||||
docker-compose logs -f
|
||||
```
|
||||
|
||||
5. **Stop services:**
|
||||
|
||||
```bash
|
||||
docker-compose down
|
||||
```
|
||||
|
||||
### Local Services
|
||||
|
||||
#### PostgreSQL
|
||||
- **Image:** `postgres:16-alpine`
|
||||
- **Port:** 5432 (internal only)
|
||||
- **Volume:** `postgres-data`
|
||||
- **Database:** `sexy`
|
||||
|
||||
#### Redis
|
||||
- **Image:** `redis:7-alpine`
|
||||
- **Port:** 6379 (internal only)
|
||||
- **Volume:** `redis-data`
|
||||
- **Persistence:** AOF enabled
|
||||
|
||||
#### Directus
|
||||
- **Image:** `directus/directus:11`
|
||||
- **Port:** 8055 (exposed)
|
||||
- **Volumes:**
|
||||
- `directus-uploads` - File uploads
|
||||
- `./packages/bundle/dist` - Custom extensions
|
||||
- **Features:**
|
||||
- Auto-reload extensions
|
||||
- WebSockets enabled
|
||||
- CORS enabled for localhost
|
||||
|
||||
### Local Development Workflow
|
||||
|
||||
```bash
|
||||
# Start infrastructure (Postgres, Redis, Directus)
|
||||
docker-compose up -d
|
||||
|
||||
# Develop frontend locally with hot reload
|
||||
cd packages/frontend
|
||||
pnpm dev
|
||||
|
||||
# Build Directus bundle
|
||||
pnpm --filter @sexy.pivoine.art/bundle build
|
||||
|
||||
# Restart Directus to load new bundle
|
||||
docker-compose restart directus
|
||||
```
|
||||
|
||||
## Production Deployment
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- External PostgreSQL database
|
||||
- External Redis instance
|
||||
- Traefik reverse proxy configured
|
||||
- External network: `compose_network`
|
||||
|
||||
### Setup
|
||||
|
||||
The production compose file now uses the `include` directive to automatically extend `compose.yml`, making deployment simpler.
|
||||
|
||||
1. **Create production environment file:**
|
||||
|
||||
```bash
|
||||
cp .env.production.example .env.production
|
||||
```
|
||||
|
||||
2. **Edit `.env.production` with your values:**
|
||||
|
||||
```bash
|
||||
# Database (external)
|
||||
CORE_DB_HOST=your-postgres-host
|
||||
SEXY_DB_NAME=sexy_production
|
||||
DB_USER=sexy
|
||||
DB_PASSWORD=your-secure-password
|
||||
|
||||
# Redis (external)
|
||||
CORE_REDIS_HOST=your-redis-host
|
||||
|
||||
# Directus
|
||||
SEXY_DIRECTUS_SECRET=your-32-char-random-secret
|
||||
ADMIN_PASSWORD=your-secure-admin-password
|
||||
|
||||
# Traefik
|
||||
SEXY_TRAEFIK_HOST=sexy.pivoine.art
|
||||
|
||||
# Frontend
|
||||
PUBLIC_API_URL=https://sexy.pivoine.art/api
|
||||
PUBLIC_URL=https://sexy.pivoine.art
|
||||
|
||||
# Email (SMTP)
|
||||
EMAIL_SMTP_HOST=smtp.your-provider.com
|
||||
EMAIL_SMTP_USER=your-email@domain.com
|
||||
EMAIL_SMTP_PASSWORD=your-smtp-password
|
||||
```
|
||||
|
||||
3. **Deploy:**
|
||||
|
||||
```bash
|
||||
# Simple deployment - compose.production.yml includes compose.yml automatically
|
||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
||||
|
||||
# Or use the traditional multi-file approach (same result)
|
||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production up -d
|
||||
```
|
||||
|
||||
### Production Services
|
||||
|
||||
#### Directus
|
||||
- **Image:** `directus/directus:11` (configurable)
|
||||
- **Network:** `compose_network` (external)
|
||||
- **Volumes:**
|
||||
- `/var/www/sexy.pivoine.art/uploads` - Persistent uploads
|
||||
- `/var/www/sexy.pivoine.art/packages/bundle/dist` - Extensions
|
||||
- **Traefik routing:**
|
||||
  - Route: `sexy.pivoine.art/api` (host + path prefix)
|
||||
- Strips `/api` prefix before forwarding
|
||||
- HTTPS with auto-certificates
|
||||
|
||||
#### Frontend
|
||||
- **Image:** `ghcr.io/valknarxxx/sexy:latest` (from GHCR)
|
||||
- **Network:** `compose_network` (external)
|
||||
- **Volume:** `/var/www/sexy.pivoine.art` - Application code
|
||||
- **Traefik routing:**
|
||||
- Domain: `sexy.pivoine.art`
|
||||
- HTTPS with auto-certificates
|
||||
|
||||
### Traefik Integration
|
||||
|
||||
Both services are configured with Traefik labels for automatic routing:
|
||||
|
||||
**Frontend:**
|
||||
- HTTP → HTTPS redirect
|
||||
- Routes `sexy.pivoine.art` to port 3000
|
||||
- Gzip compression enabled
|
||||
|
||||
**Directus API:**
|
||||
- HTTP → HTTPS redirect
|
||||
- Routes `sexy.pivoine.art/api` to port 8055
|
||||
- Strips `/api` prefix
|
||||
- Gzip compression enabled
|
||||
|
||||
### Production Commands
|
||||
|
||||
```bash
|
||||
# Deploy/update (simplified - uses include)
|
||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
||||
|
||||
# View logs
|
||||
docker-compose -f compose.production.yml logs -f
|
||||
|
||||
# Restart specific service
|
||||
docker-compose -f compose.production.yml restart frontend
|
||||
|
||||
# Stop all services
|
||||
docker-compose -f compose.production.yml down
|
||||
|
||||
# Update images
|
||||
docker-compose -f compose.production.yml pull
|
||||
docker-compose -f compose.production.yml up -d
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
### Local Development (`.env`)
|
||||
|
||||
| Variable | Default | Description |
|
||||
|----------|---------|-------------|
|
||||
| `DB_DATABASE` | `sexy` | Database name |
|
||||
| `DB_USER` | `sexy` | Database user |
|
||||
| `DB_PASSWORD` | `sexy` | Database password |
|
||||
| `DIRECTUS_SECRET` | - | Secret for Directus (min 32 chars) |
|
||||
| `ADMIN_EMAIL` | `admin@sexy.pivoine.art` | Admin email |
|
||||
| `ADMIN_PASSWORD` | `admin` | Admin password |
|
||||
| `CORS_ORIGIN` | `http://localhost:3000` | CORS allowed origins |
|
||||
|
||||
See `.env.example` for full list.
|
||||
|
||||
### Production (`.env.production`)
|
||||
|
||||
| Variable | Description | Required |
|
||||
|----------|-------------|----------|
|
||||
| `CORE_DB_HOST` | External PostgreSQL host | ✅ |
|
||||
| `SEXY_DB_NAME` | Database name | ✅ |
|
||||
| `DB_PASSWORD` | Database password | ✅ |
|
||||
| `CORE_REDIS_HOST` | External Redis host | ✅ |
|
||||
| `SEXY_DIRECTUS_SECRET` | Directus secret key | ✅ |
|
||||
| `SEXY_TRAEFIK_HOST` | Domain name | ✅ |
|
||||
| `EMAIL_SMTP_HOST` | SMTP server | ✅ |
|
||||
| `EMAIL_SMTP_PASSWORD` | SMTP password | ✅ |
|
||||
| `SEXY_FRONTEND_PUBLIC_API_URL` | Frontend API URL | ✅ |
|
||||
| `SEXY_FRONTEND_PUBLIC_URL` | Frontend public URL | ✅ |
|
||||
|
||||
See `.env.production.example` for full list.
|
||||
|
||||
**Note:** All frontend-specific variables are prefixed with `SEXY_FRONTEND_` for clarity.
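
As a quick orientation for how that prefix works end to end: Compose passes `SEXY_FRONTEND_PUBLIC_API_URL` into the container as plain `PUBLIC_API_URL`, and the SvelteKit code only ever sees the unprefixed name. A minimal sketch, assuming the usual SvelteKit access patterns (this commit only shows `PUBLIC_URL` via `$env/static/public` and `process.env.PUBLIC_API_URL` in the logger):

```typescript
// Sketch: how the unprefixed PUBLIC_* names are consumed inside the frontend.
import { PUBLIC_URL } from "$env/static/public"; // inlined at build time

// Runtime lookup, as used by the logger's startup() banner:
const apiUrl = process.env.PUBLIC_API_URL ?? "http://localhost:8055";

export function describeConfig(): string {
  // SEXY_FRONTEND_PUBLIC_API_URL only exists in compose/.env.production;
  // by the time this code runs it has been mapped to PUBLIC_API_URL.
  return `site=${PUBLIC_URL} api=${apiUrl}`;
}
```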
|
||||
|
||||
## Volumes
|
||||
|
||||
### Local Development
|
||||
|
||||
- `postgres-data` - PostgreSQL database
|
||||
- `redis-data` - Redis persistence
|
||||
- `directus-uploads` - Uploaded files
|
||||
|
||||
### Production
|
||||
|
||||
- `/var/www/sexy.pivoine.art/uploads` - Directus uploads
|
||||
- `/var/www/sexy.pivoine.art` - Application code (frontend)
|
||||
|
||||
## Networks
|
||||
|
||||
### Local: `sexy-network`
|
||||
- Bridge network
|
||||
- Internal communication only
|
||||
- Directus exposed on 8055
|
||||
|
||||
### Production: `compose_network`
|
||||
- External network (pre-existing)
|
||||
- Connects to Traefik
|
||||
- No exposed ports (Traefik handles routing)
|
||||
|
||||
## Health Checks
|
||||
|
||||
All services include health checks:
|
||||
|
||||
**PostgreSQL:**
|
||||
- Command: `pg_isready`
|
||||
- Interval: 10s
|
||||
|
||||
**Redis:**
|
||||
- Command: `redis-cli ping`
|
||||
- Interval: 10s
|
||||
|
||||
**Directus:**
|
||||
- Endpoint: `/server/health`
|
||||
- Interval: 30s
|
||||
|
||||
**Frontend:**
- HTTP GET: `http://localhost:3000/` (see the sketch after this list)
- Interval: 30s
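
The frontend check is the same probe as the `node -e` one-liner in `compose.yml`, shown here expanded for readability (a sketch of what that one-liner does, not the literal command Compose runs):

```typescript
// Health probe sketch: exit 0 only when GET http://localhost:3000/ returns 200.
import http from "node:http";

http
  .get("http://localhost:3000/", (res) => {
    process.exit(res.statusCode === 200 ? 0 : 1);
  })
  .on("error", () => process.exit(1)); // treat connection errors as unhealthy
```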
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Local Development
|
||||
|
||||
**Problem:** Directus won't start
|
||||
|
||||
```bash
|
||||
# Check logs
|
||||
docker-compose logs directus
|
||||
|
||||
# Common issues:
|
||||
# 1. Database not ready - wait for postgres to be healthy
|
||||
# 2. Wrong secret - check DIRECTUS_SECRET is at least 32 chars
|
||||
```
|
||||
|
||||
**Problem:** Can't connect to database
|
||||
|
||||
```bash
|
||||
# Check if postgres is running
|
||||
docker-compose ps postgres
|
||||
|
||||
# Verify health
|
||||
docker-compose exec postgres pg_isready -U sexy
|
||||
```
|
||||
|
||||
**Problem:** Extensions not loading
|
||||
|
||||
```bash
|
||||
# Rebuild bundle
|
||||
pnpm --filter @sexy.pivoine.art/bundle build
|
||||
|
||||
# Verify volume mount
|
||||
docker-compose exec directus ls -la /directus/extensions/
|
||||
|
||||
# Restart Directus
|
||||
docker-compose restart directus
|
||||
```
|
||||
|
||||
### Production
|
||||
|
||||
**Problem:** Services not accessible via domain
|
||||
|
||||
```bash
|
||||
# Check Traefik labels
|
||||
docker inspect sexy_frontend | grep traefik
|
||||
|
||||
# Verify compose_network exists
|
||||
docker network ls | grep compose_network
|
||||
|
||||
# Check Traefik is running
|
||||
docker ps | grep traefik
|
||||
```
|
||||
|
||||
**Problem:** Can't connect to external database
|
||||
|
||||
```bash
|
||||
# Test connection from Directus container
|
||||
docker-compose exec directus sh
|
||||
apk add postgresql-client
|
||||
# Inside the container the mapped names apply (DB_HOST, DB_USER, DB_DATABASE),
# not the host-side CORE_DB_HOST / SEXY_DB_NAME variables
psql -h "$DB_HOST" -U "$DB_USER" -d "$DB_DATABASE"
|
||||
```
|
||||
|
||||
**Problem:** Frontend can't reach Directus API
|
||||
|
||||
```bash
|
||||
# Check Directus is accessible
|
||||
curl https://sexy.pivoine.art/api/server/health
|
||||
|
||||
# Verify CORS settings
|
||||
# PUBLIC_API_URL should match the public Directus URL
|
||||
```
|
||||
|
||||
## Migration from Old Setup
|
||||
|
||||
If migrating from `docker-compose.production.yml`:
|
||||
|
||||
1. **Rename environment variables** according to `.env.production.example`
|
||||
2. **Update command** to use both compose files
|
||||
3. **Verify Traefik labels** match your setup
|
||||
4. **Test** with `docker-compose config` to see merged configuration
|
||||
|
||||
```bash
|
||||
# Validate configuration
|
||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production config
|
||||
|
||||
# Deploy
|
||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production up -d
|
||||
```
|
||||
|
||||
## Best Practices
|
||||
|
||||
### Local Development
|
||||
1. Use default credentials (they're fine for local)
|
||||
2. Keep `EXTENSIONS_AUTO_RELOAD=true` for quick iteration
|
||||
3. Run frontend via `pnpm dev` for hot reload
|
||||
4. Restart Directus after bundle changes
|
||||
|
||||
### Production
|
||||
1. Use strong passwords for database and admin
|
||||
2. Set `EXTENSIONS_AUTO_RELOAD=false` for stability
|
||||
3. Use GHCR images for frontend
|
||||
4. Enable Gzip compression via Traefik
|
||||
5. Monitor logs regularly
|
||||
6. Keep backups of uploads and database
|
||||
|
||||
## See Also
|
||||
|
||||
- [DOCKER.md](DOCKER.md) - Docker image documentation
|
||||
- [QUICKSTART.md](QUICKSTART.md) - Quick start guide
|
||||
- [CLAUDE.md](CLAUDE.md) - Development guide
|
||||
@@ -137,7 +137,8 @@ sexy.pivoine.art/
| Document | Purpose | Emoji |
|----------|---------|-------|
| [QUICKSTART.md](QUICKSTART.md) | Get wet... I mean, get started! | 💦 |
| [DOCKER.md](DOCKER.md) | Complete deployment manual | 🐳 |
| [COMPOSE.md](COMPOSE.md) | Docker Compose setup guide | 🐳 |
| [DOCKER.md](DOCKER.md) | Standalone Docker deployment | 🐋 |
| [CLAUDE.md](CLAUDE.md) | Architecture & development | 🤖 |
| [.github/workflows/README.md](.github/workflows/README.md) | CI/CD workflows | ⚙️ |

@@ -179,15 +180,15 @@ docker buildx build --platform linux/amd64,linux/arm64 -t sexy.pivoine.art:lates
|
||||
|
||||
## 🚀 Deployment — Share Your Creation
|
||||
|
||||
### Production with Docker
|
||||
### Production with Docker Compose
|
||||
|
||||
```bash
|
||||
# Configure your secrets
|
||||
cp .env.production.example .env.production
|
||||
# Edit .env.production with your intimate details
|
||||
|
||||
# Deploy with grace
|
||||
docker-compose -f docker-compose.production.yml up -d
|
||||
# Deploy with grace (uses Traefik for routing)
|
||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
||||
```
|
||||
|
||||
### Production without Docker
|
||||
|
||||
REBUILD_GUIDE.md · 265 lines · new file
@@ -0,0 +1,265 @@
|
||||
# 🔄 Rebuild Guide - When You Need to Rebuild the Image
|
||||
|
||||
## Why Rebuild?
|
||||
|
||||
SvelteKit's `PUBLIC_*` environment variables are **baked into the JavaScript** at build time (a short sketch of the build-time vs. runtime distinction follows the list below). You need to rebuild when:
|
||||
|
||||
1. ✅ Changing `PUBLIC_API_URL`
|
||||
2. ✅ Changing `PUBLIC_URL`
|
||||
3. ✅ Changing `PUBLIC_UMAMI_ID`
|
||||
4. ✅ Changing any `LETTERSPACE_*` variables
|
||||
5. ❌ NOT needed for Directus env vars (those are runtime)
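
A minimal sketch of the distinction behind this list: `$env/static/public` values are inlined into the bundle by Vite at build time, while anything read from `process.env` (or `$env/dynamic/*`) is looked up when the server process starts. The module names are standard SvelteKit; which pattern each variable uses in this codebase is only partly visible in this commit.

```typescript
// Build time: replaced with a string literal during `vite build`, so changing
// PUBLIC_API_URL afterwards has no effect until the image is rebuilt.
import { PUBLIC_API_URL } from "$env/static/public";

// Runtime: read when the Node server starts, so a container restart with a new
// environment is enough (Directus-style settings behave this way).
const smtpHost = process.env.EMAIL_SMTP_HOST;

export function configSnapshot() {
  return { api: PUBLIC_API_URL, smtp: smtpHost ?? "unset" };
}
```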
|
||||
|
||||
## Quick Rebuild Process
|
||||
|
||||
### 1. Update Frontend Environment Variables
|
||||
|
||||
Edit the frontend `.env` file:
|
||||
|
||||
```bash
|
||||
nano packages/frontend/.env
|
||||
```
|
||||
|
||||
Set your production values:
|
||||
```bash
|
||||
PUBLIC_API_URL=https://sexy.pivoine.art/api
|
||||
PUBLIC_URL=https://sexy.pivoine.art
|
||||
PUBLIC_UMAMI_ID=your-umami-id
|
||||
LETTERSPACE_API_URL=https://api.letterspace.com/v1
|
||||
LETTERSPACE_API_KEY=your-key
|
||||
LETTERSPACE_LIST_ID=your-list-id
|
||||
```
|
||||
|
||||
### 2. Rebuild the Image
|
||||
|
||||
```bash
|
||||
# From the project root
|
||||
docker build -t ghcr.io/valknarxxx/sexy:latest -t sexy.pivoine.art:latest .
|
||||
```
|
||||
|
||||
**Expected Time:** 30-45 minutes (first build), 10-15 minutes (cached rebuild)
|
||||
|
||||
### 3. Restart Services
|
||||
|
||||
```bash
|
||||
# If using docker-compose
|
||||
cd /home/valknar/Projects/docker-compose/sexy
|
||||
docker compose down
|
||||
docker compose up -d
|
||||
|
||||
# Or directly
|
||||
docker stop sexy_frontend
|
||||
docker rm sexy_frontend
|
||||
docker compose up -d frontend
|
||||
```
|
||||
|
||||
## Monitoring the Build
|
||||
|
||||
### Check Build Progress
|
||||
|
||||
```bash
|
||||
# Watch build output
|
||||
docker build -t ghcr.io/valknarxxx/sexy:latest .
|
||||
|
||||
# Build stages:
|
||||
# 1. Base (~30s) - Node.js setup
|
||||
# 2. Builder (~25-40min) - Rust + WASM + packages
|
||||
# - Rust installation: ~2-3 min
|
||||
# - wasm-bindgen-cli: ~10-15 min
|
||||
# - WASM build: ~5-10 min
|
||||
# - Package builds: ~5-10 min
|
||||
# 3. Runner (~2min) - Final image assembly
|
||||
```
|
||||
|
||||
### Verify Environment Variables in Built Image
|
||||
|
||||
```bash
|
||||
# Check what PUBLIC_API_URL is baked in
|
||||
docker run --rm ghcr.io/valknarxxx/sexy:latest sh -c \
|
||||
"grep -r 'PUBLIC_API_URL' /home/node/app/packages/frontend/build/ | head -3"
|
||||
|
||||
# Should show: https://sexy.pivoine.art/api
|
||||
```
|
||||
|
||||
## Push to GitHub Container Registry
|
||||
|
||||
After successful build:
|
||||
|
||||
```bash
|
||||
# Login to GHCR (first time only)
|
||||
echo $GITHUB_TOKEN | docker login ghcr.io -u valknarxxx --password-stdin
|
||||
|
||||
# Push the image
|
||||
docker push ghcr.io/valknarxxx/sexy:latest
|
||||
```
|
||||
|
||||
## Alternative: Build Arguments (Future Enhancement)
|
||||
|
||||
To avoid rebuilding for every env change, consider adding build arguments:
|
||||
|
||||
```dockerfile
|
||||
# In Dockerfile, before building frontend:
|
||||
ARG PUBLIC_API_URL=https://sexy.pivoine.art/api
|
||||
ARG PUBLIC_URL=https://sexy.pivoine.art
|
||||
ARG PUBLIC_UMAMI_ID=
|
||||
|
||||
# Create .env.production dynamically
|
||||
RUN echo "PUBLIC_API_URL=${PUBLIC_API_URL}" > packages/frontend/.env.production && \
|
||||
echo "PUBLIC_URL=${PUBLIC_URL}" >> packages/frontend/.env.production && \
|
||||
echo "PUBLIC_UMAMI_ID=${PUBLIC_UMAMI_ID}" >> packages/frontend/.env.production
|
||||
```
|
||||
|
||||
Then build with:
|
||||
```bash
|
||||
docker build \
|
||||
--build-arg PUBLIC_API_URL=https://sexy.pivoine.art/api \
|
||||
--build-arg PUBLIC_URL=https://sexy.pivoine.art \
|
||||
-t ghcr.io/valknarxxx/sexy:latest .
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Build Fails at Rust Installation
|
||||
|
||||
```bash
|
||||
# Check network connectivity
|
||||
ping -c 3 sh.rustup.rs
|
||||
|
||||
# Build with verbose output
|
||||
docker build --progress=plain -t ghcr.io/valknarxxx/sexy:latest .
|
||||
```
|
||||
|
||||
### Build Fails at WASM
|
||||
|
||||
```bash
|
||||
# Check if wasm-bindgen-cli matches package.json version
|
||||
docker run --rm rust:latest cargo install wasm-bindgen-cli --version 0.2.103
|
||||
```
|
||||
|
||||
### Frontend Still Shows Wrong URL
|
||||
|
||||
```bash
|
||||
# Verify .env file is correct
|
||||
cat packages/frontend/.env
|
||||
|
||||
# Check if old image is cached
|
||||
docker images | grep sexy
|
||||
docker rmi ghcr.io/valknarxxx/sexy:old-tag
|
||||
|
||||
# Force rebuild without cache
|
||||
docker build --no-cache -t ghcr.io/valknarxxx/sexy:latest .
|
||||
```
|
||||
|
||||
### Container Starts But Can't Connect to API
|
||||
|
||||
1. Check Traefik routing:
|
||||
```bash
|
||||
docker logs traefik | grep sexy
|
||||
```
|
||||
|
||||
2. Check if Directus is accessible:
|
||||
```bash
|
||||
curl -I https://sexy.pivoine.art/api/server/health
|
||||
```
|
||||
|
||||
3. Check frontend logs:
|
||||
```bash
|
||||
docker logs sexy_frontend
|
||||
```
|
||||
|
||||
## Development vs Production
|
||||
|
||||
### Development (Local)
|
||||
- Use `pnpm dev` for hot reload
|
||||
- No rebuild needed for code changes
|
||||
- Env vars from `.env` or shell
|
||||
|
||||
### Production (Docker)
|
||||
- Rebuild required for PUBLIC_* changes
|
||||
- Changes baked into JavaScript
|
||||
- Env vars from `packages/frontend/.env`
|
||||
|
||||
## Optimization Tips
|
||||
|
||||
### Speed Up Rebuilds
|
||||
|
||||
1. **Use BuildKit cache:**
|
||||
```bash
|
||||
export DOCKER_BUILDKIT=1
|
||||
docker build --build-arg BUILDKIT_INLINE_CACHE=1 -t ghcr.io/valknarxxx/sexy:latest .
|
||||
```
|
||||
|
||||
2. **Multi-stage caching:**
|
||||
- Dockerfile already optimized with multi-stage build
|
||||
- Dependencies cached separately from code
|
||||
|
||||
3. **Parallel builds:**
|
||||
```bash
|
||||
# Pin the build to specific CPU cores (classic-builder flag; `docker build` has no `--cpus` option)
docker build --cpuset-cpus=0-3 -t ghcr.io/valknarxxx/sexy:latest .
|
||||
```
|
||||
|
||||
### Reduce Image Size
|
||||
|
||||
Current optimizations:
|
||||
- ✅ Multi-stage build
|
||||
- ✅ Production dependencies only
|
||||
- ✅ Minimal base image
|
||||
- ✅ No dev tools in final image
|
||||
|
||||
Expected sizes:
|
||||
- Base: ~100MB
|
||||
- Builder: ~2-3GB (not shipped)
|
||||
- Runner: ~300-500MB (final)
|
||||
|
||||
## Automation
|
||||
|
||||
### GitHub Actions (Already Set Up)
|
||||
|
||||
The `.github/workflows/docker-build-push.yml` automatically:
|
||||
1. Builds on push to main
|
||||
2. Creates version tags
|
||||
3. Pushes to GHCR
|
||||
4. Caches layers for faster builds
|
||||
|
||||
**Trigger a rebuild:**
|
||||
```bash
|
||||
git tag v1.0.1
|
||||
git push origin v1.0.1
|
||||
```
|
||||
|
||||
### Local Build Script
|
||||
|
||||
Use the provided `build.sh`:
|
||||
```bash
|
||||
./build.sh -t v1.0.0 -p
|
||||
```
|
||||
|
||||
## When NOT to Rebuild
|
||||
|
||||
You DON'T need to rebuild for:
|
||||
- ❌ Directus configuration changes
|
||||
- ❌ Database credentials
|
||||
- ❌ Redis settings
|
||||
- ❌ SMTP settings
|
||||
- ❌ Session cookie settings
|
||||
- ❌ Traefik labels
|
||||
|
||||
These are runtime environment variables and can be changed in docker-compose.
|
||||
|
||||
## Summary
|
||||
|
||||
| Change | Rebuild Needed | How to Apply |
|
||||
|--------|----------------|--------------|
|
||||
| `PUBLIC_API_URL` | ✅ Yes | Rebuild image |
|
||||
| `PUBLIC_URL` | ✅ Yes | Rebuild image |
|
||||
| `PUBLIC_UMAMI_ID` | ✅ Yes | Rebuild image |
|
||||
| `LETTERSPACE_*` | ✅ Yes | Rebuild image |
|
||||
| `SEXY_DIRECTUS_*` | ❌ No | Restart container |
|
||||
| `DB_*` | ❌ No | Restart container |
|
||||
| `EMAIL_*` | ❌ No | Restart container |
|
||||
| Traefik labels | ❌ No | Restart container |
|
||||
|
||||
---
|
||||
|
||||
**Remember:** The key difference is **build-time** (compiled into JS) vs **runtime** (read from environment).
|
||||
compose.production.yml · 130 lines · new file
@@ -0,0 +1,130 @@
|
||||
include:
|
||||
- compose.yml
|
||||
|
||||
# Production compose file - extends base compose.yml
|
||||
# Usage: docker-compose -f compose.production.yml up -d
|
||||
|
||||
networks:
|
||||
compose_network:
|
||||
external: true
|
||||
name: compose_network
|
||||
|
||||
services:
|
||||
# Disable local postgres for production (use external DB)
|
||||
postgres:
|
||||
deploy:
|
||||
replicas: 0
|
||||
|
||||
# Disable local redis for production (use external Redis)
|
||||
redis:
|
||||
deploy:
|
||||
replicas: 0
|
||||
|
||||
# Override Directus for production
|
||||
directus:
|
||||
networks:
|
||||
- compose_network
|
||||
ports: [] # Remove exposed ports, use Traefik instead
|
||||
|
||||
# Override volumes for production paths
|
||||
volumes:
|
||||
- ${SEXY_DIRECTUS_UPLOADS:-./uploads}:/directus/uploads
|
||||
- ${SEXY_DIRECTUS_BUNDLE:-./packages/bundle/dist}:/directus/extensions/sexy.pivoine.art
|
||||
|
||||
# Override environment for production settings
|
||||
environment:
|
||||
# Database (external)
|
||||
DB_HOST: ${CORE_DB_HOST}
|
||||
DB_PORT: ${CORE_DB_PORT:-5432}
|
||||
DB_DATABASE: ${SEXY_DB_NAME}
|
||||
DB_USER: ${DB_USER}
|
||||
DB_PASSWORD: ${DB_PASSWORD}
|
||||
|
||||
# General
|
||||
SECRET: ${SEXY_DIRECTUS_SECRET}
|
||||
ADMIN_EMAIL: ${ADMIN_EMAIL}
|
||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD}
|
||||
PUBLIC_URL: ${SEXY_PUBLIC_URL}
|
||||
|
||||
# Cache (external Redis)
|
||||
REDIS: redis://${CORE_REDIS_HOST}:${CORE_REDIS_PORT:-6379}
|
||||
|
||||
# CORS
|
||||
CORS_ORIGIN: ${SEXY_CORS_ORIGIN}
|
||||
|
||||
# Security (production settings)
|
||||
SESSION_COOKIE_SECURE: ${SEXY_SESSION_COOKIE_SECURE:-true}
|
||||
SESSION_COOKIE_SAME_SITE: ${SEXY_SESSION_COOKIE_SAME_SITE:-strict}
|
||||
SESSION_COOKIE_DOMAIN: ${SEXY_SESSION_COOKIE_DOMAIN}
|
||||
|
||||
# Extensions
|
||||
EXTENSIONS_AUTO_RELOAD: ${SEXY_EXTENSIONS_AUTO_RELOAD:-false}
|
||||
|
||||
# Email (production SMTP)
|
||||
EMAIL_TRANSPORT: ${EMAIL_TRANSPORT:-smtp}
|
||||
EMAIL_FROM: ${EMAIL_FROM}
|
||||
EMAIL_SMTP_HOST: ${EMAIL_SMTP_HOST}
|
||||
EMAIL_SMTP_PORT: ${EMAIL_SMTP_PORT:-587}
|
||||
EMAIL_SMTP_USER: ${EMAIL_SMTP_USER}
|
||||
EMAIL_SMTP_PASSWORD: ${EMAIL_SMTP_PASSWORD}
|
||||
|
||||
# User URLs
|
||||
USER_REGISTER_URL_ALLOW_LIST: ${SEXY_USER_REGISTER_URL_ALLOW_LIST}
|
||||
PASSWORD_RESET_URL_ALLOW_LIST: ${SEXY_PASSWORD_RESET_URL_ALLOW_LIST}
|
||||
|
||||
# Remove local dependencies
|
||||
depends_on: []
|
||||
|
||||
labels:
|
||||
# Traefik labels for reverse proxy
|
||||
- 'traefik.enable=${SEXY_TRAEFIK_ENABLED:-true}'
|
||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-redirect-web-secure.redirectscheme.scheme=https'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-redirect-web-secure'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.rule=Host(`${SEXY_TRAEFIK_HOST}`) && PathPrefix(`/api`)'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.entrypoints=web'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.rule=Host(`${SEXY_TRAEFIK_HOST}`) && PathPrefix(`/api`)'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.tls.certresolver=resolver'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.entrypoints=web-secure'
|
||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure-compress.compress=true'
|
||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-strip.stripprefix.prefixes=/api'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-strip,${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure-compress'
|
||||
- 'traefik.http.services.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.loadbalancer.server.port=8055'
|
||||
- 'traefik.docker.network=compose_network'
|
||||
|
||||
# Override Frontend for production
|
||||
frontend:
|
||||
networks:
|
||||
- compose_network
|
||||
ports: [] # Remove exposed ports, use Traefik instead
|
||||
|
||||
# Override environment for production
|
||||
environment:
|
||||
NODE_ENV: production
|
||||
PUBLIC_API_URL: ${SEXY_FRONTEND_PUBLIC_API_URL}
|
||||
PUBLIC_URL: ${SEXY_FRONTEND_PUBLIC_URL}
|
||||
PUBLIC_UMAMI_ID: ${SEXY_FRONTEND_PUBLIC_UMAMI_ID:-}
|
||||
LETTERSPACE_API_URL: ${SEXY_FRONTEND_LETTERSPACE_API_URL:-}
|
||||
LETTERSPACE_API_KEY: ${SEXY_FRONTEND_LETTERSPACE_API_KEY:-}
|
||||
LETTERSPACE_LIST_ID: ${SEXY_FRONTEND_LETTERSPACE_LIST_ID:-}
|
||||
|
||||
# Override volume for production path
|
||||
volumes:
|
||||
- ${SEXY_FRONTEND_PATH:-/var/www/sexy.pivoine.art}:/home/node/app
|
||||
|
||||
# Remove local dependency
|
||||
depends_on: []
|
||||
|
||||
labels:
|
||||
# Traefik labels for reverse proxy
|
||||
- 'traefik.enable=${SEXY_TRAEFIK_ENABLED:-true}'
|
||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-redirect-web-secure.redirectscheme.scheme=https'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-redirect-web-secure'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.rule=Host(`${SEXY_TRAEFIK_HOST}`)'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.entrypoints=web'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.rule=Host(`${SEXY_TRAEFIK_HOST}`)'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.tls.certresolver=resolver'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.entrypoints=web-secure'
|
||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure-compress.compress=true'
|
||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure-compress'
|
||||
- 'traefik.http.services.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.loadbalancer.server.port=3000'
|
||||
- 'traefik.docker.network=compose_network'
|
||||
compose.yml · 183 lines · new file
@@ -0,0 +1,183 @@
|
||||
services:
|
||||
# PostgreSQL Database (local only)
|
||||
postgres:
|
||||
image: postgres:16-alpine
|
||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_postgres
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- sexy-network
|
||||
volumes:
|
||||
- postgres-data:/var/lib/postgresql/data
|
||||
environment:
|
||||
POSTGRES_DB: ${DB_DATABASE:-sexy}
|
||||
POSTGRES_USER: ${DB_USER:-sexy}
|
||||
POSTGRES_PASSWORD: ${DB_PASSWORD:-sexy}
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-sexy}"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# Redis Cache (local only)
|
||||
redis:
|
||||
image: redis:7-alpine
|
||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_redis
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- sexy-network
|
||||
volumes:
|
||||
- redis-data:/data
|
||||
command: redis-server --appendonly yes
|
||||
healthcheck:
|
||||
test: ["CMD", "redis-cli", "ping"]
|
||||
interval: 10s
|
||||
timeout: 5s
|
||||
retries: 5
|
||||
|
||||
# Directus CMS
|
||||
directus:
|
||||
image: ${SEXY_DIRECTUS_IMAGE:-directus/directus:11}
|
||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_api
|
||||
restart: unless-stopped
|
||||
networks:
|
||||
- sexy-network
|
||||
ports:
|
||||
- "8055:8055"
|
||||
volumes:
|
||||
- directus-uploads:/directus/uploads
|
||||
- ${SEXY_DIRECTUS_BUNDLE:-./packages/bundle/dist}:/directus/extensions/sexy.pivoine.art
|
||||
environment:
|
||||
# Database
|
||||
DB_CLIENT: pg
|
||||
DB_HOST: ${CORE_DB_HOST:-postgres}
|
||||
DB_PORT: ${CORE_DB_PORT:-5432}
|
||||
DB_DATABASE: ${SEXY_DB_NAME:-sexy}
|
||||
DB_USER: ${DB_USER:-sexy}
|
||||
DB_PASSWORD: ${DB_PASSWORD:-sexy}
|
||||
|
||||
# General
|
||||
SECRET: ${SEXY_DIRECTUS_SECRET:-replace-with-random-secret-min-32-chars}
|
||||
ADMIN_EMAIL: ${ADMIN_EMAIL:-admin@sexy.pivoine.art}
|
||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD:-admin}
|
||||
PUBLIC_URL: ${SEXY_PUBLIC_URL:-http://localhost:8055}
|
||||
|
||||
# Cache
|
||||
CACHE_ENABLED: ${SEXY_CACHE_ENABLED:-true}
|
||||
CACHE_AUTO_PURGE: ${SEXY_CACHE_AUTO_PURGE:-true}
|
||||
CACHE_STORE: redis
|
||||
REDIS: redis://${CORE_REDIS_HOST:-redis}:${CORE_REDIS_PORT:-6379}
|
||||
|
||||
# CORS
|
||||
CORS_ENABLED: ${SEXY_CORS_ENABLED:-true}
|
||||
CORS_ORIGIN: ${SEXY_CORS_ORIGIN:-http://localhost:3000}
|
||||
|
||||
# Security
|
||||
SESSION_COOKIE_SECURE: ${SEXY_SESSION_COOKIE_SECURE:-false}
|
||||
SESSION_COOKIE_SAME_SITE: ${SEXY_SESSION_COOKIE_SAME_SITE:-lax}
|
||||
SESSION_COOKIE_DOMAIN: ${SEXY_SESSION_COOKIE_DOMAIN:-localhost}
|
||||
|
||||
# Extensions
|
||||
EXTENSIONS_PATH: ${SEXY_EXTENSIONS_PATH:-/directus/extensions}
|
||||
EXTENSIONS_AUTO_RELOAD: ${SEXY_EXTENSIONS_AUTO_RELOAD:-true}
|
||||
|
||||
# WebSockets
|
||||
WEBSOCKETS_ENABLED: ${SEXY_WEBSOCKETS_ENABLED:-true}
|
||||
|
||||
# Email (optional for local dev)
|
||||
EMAIL_TRANSPORT: ${EMAIL_TRANSPORT:-sendmail}
|
||||
EMAIL_FROM: ${EMAIL_FROM:-noreply@sexy.pivoine.art}
|
||||
EMAIL_SMTP_HOST: ${EMAIL_SMTP_HOST:-}
|
||||
EMAIL_SMTP_PORT: ${EMAIL_SMTP_PORT:-587}
|
||||
EMAIL_SMTP_USER: ${EMAIL_SMTP_USER:-}
|
||||
EMAIL_SMTP_PASSWORD: ${EMAIL_SMTP_PASSWORD:-}
|
||||
|
||||
# User Registration & Password Reset URLs
|
||||
USER_REGISTER_URL_ALLOW_LIST: ${SEXY_USER_REGISTER_URL_ALLOW_LIST:-http://localhost:3000}
|
||||
PASSWORD_RESET_URL_ALLOW_LIST: ${SEXY_PASSWORD_RESET_URL_ALLOW_LIST:-http://localhost:3000}
|
||||
|
||||
# Content Security Policy
|
||||
CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC: ${SEXY_CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC:-}
|
||||
|
||||
# Timezone
|
||||
TZ: ${TIMEZONE:-Europe/Amsterdam}
|
||||
|
||||
depends_on:
|
||||
postgres:
|
||||
condition: service_healthy
|
||||
redis:
|
||||
condition: service_healthy
|
||||
|
||||
healthcheck:
|
||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8055/server/health"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
# Frontend (local development - optional, usually run via pnpm dev)
|
||||
frontend:
|
||||
image: ${SEXY_FRONTEND_IMAGE:-ghcr.io/valknarxxx/sexy:latest}
|
||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_frontend
|
||||
restart: unless-stopped
|
||||
user: node
|
||||
working_dir: /home/node/app/packages/frontend
|
||||
networks:
|
||||
- sexy-network
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
# Node
|
||||
NODE_ENV: ${NODE_ENV:-development}
|
||||
PORT: 3000
|
||||
HOST: 0.0.0.0
|
||||
|
||||
# Public environment variables
|
||||
PUBLIC_API_URL: ${SEXY_FRONTEND_PUBLIC_API_URL:-http://localhost:8055}
|
||||
PUBLIC_URL: ${SEXY_FRONTEND_PUBLIC_URL:-http://localhost:3000}
|
||||
PUBLIC_UMAMI_ID: ${SEXY_FRONTEND_PUBLIC_UMAMI_ID:-}
|
||||
|
||||
# Letterspace newsletter integration
|
||||
LETTERSPACE_API_URL: ${SEXY_FRONTEND_LETTERSPACE_API_URL:-}
|
||||
LETTERSPACE_API_KEY: ${SEXY_FRONTEND_LETTERSPACE_API_KEY:-}
|
||||
LETTERSPACE_LIST_ID: ${SEXY_FRONTEND_LETTERSPACE_LIST_ID:-}
|
||||
|
||||
# Timezone
|
||||
TZ: ${TIMEZONE:-Europe/Amsterdam}
|
||||
|
||||
volumes:
|
||||
- ${SEXY_FRONTEND_PATH:-./}:/home/node/app
|
||||
|
||||
command: ["node", "build/index.js"]
|
||||
|
||||
depends_on:
|
||||
- directus
|
||||
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "require('http').get('http://localhost:3000/', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
|
||||
interval: 30s
|
||||
timeout: 3s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
# Uncomment to run frontend in development mode with live reload
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: Dockerfile
|
||||
# volumes:
|
||||
# - ./packages/frontend:/home/node/app/packages/frontend
|
||||
# - /home/node/app/packages/frontend/node_modules
|
||||
# environment:
|
||||
# NODE_ENV: development
|
||||
|
||||
networks:
|
||||
sexy-network:
|
||||
driver: bridge
|
||||
name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_network
|
||||
|
||||
volumes:
|
||||
directus-uploads:
|
||||
driver: local
|
||||
postgres-data:
|
||||
driver: local
|
||||
redis-data:
|
||||
driver: local
|
||||
docker-compose.production.yml · deleted
@@ -1,71 +0,0 @@
|
||||
version: '3.8'
|
||||
|
||||
services:
|
||||
frontend:
|
||||
# Use pre-built image from GitHub Container Registry
|
||||
# To build locally instead, uncomment the 'build' section and comment out 'image'
|
||||
image: ghcr.io/valknarxxx/sexy:latest
|
||||
|
||||
# Uncomment to build locally:
|
||||
# build:
|
||||
# context: .
|
||||
# dockerfile: Dockerfile
|
||||
# args:
|
||||
# NODE_ENV: production
|
||||
container_name: sexy-pivoine-frontend
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "3000:3000"
|
||||
environment:
|
||||
# Application settings
|
||||
NODE_ENV: production
|
||||
PORT: 3000
|
||||
HOST: 0.0.0.0
|
||||
|
||||
# Public environment variables
|
||||
PUBLIC_API_URL: ${PUBLIC_API_URL:-http://localhost:8055}
|
||||
PUBLIC_URL: ${PUBLIC_URL:-http://localhost:3000}
|
||||
PUBLIC_UMAMI_ID: ${PUBLIC_UMAMI_ID:-}
|
||||
|
||||
# Letterspace integration
|
||||
LETTERSPACE_API_URL: ${LETTERSPACE_API_URL:-}
|
||||
LETTERSPACE_API_KEY: ${LETTERSPACE_API_KEY:-}
|
||||
LETTERSPACE_LIST_ID: ${LETTERSPACE_LIST_ID:-}
|
||||
|
||||
networks:
|
||||
- sexy-network
|
||||
|
||||
healthcheck:
|
||||
test: ["CMD", "node", "-e", "require('http').get('http://localhost:3000/', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
|
||||
interval: 30s
|
||||
timeout: 3s
|
||||
retries: 3
|
||||
start_period: 40s
|
||||
|
||||
# Resource limits
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: '2'
|
||||
memory: 2G
|
||||
reservations:
|
||||
cpus: '0.5'
|
||||
memory: 512M
|
||||
|
||||
# Logging
|
||||
logging:
|
||||
driver: "json-file"
|
||||
options:
|
||||
max-size: "10m"
|
||||
max-file: "3"
|
||||
|
||||
networks:
|
||||
sexy-network:
|
||||
driver: bridge
|
||||
|
||||
# Example usage:
|
||||
# 1. Create a .env file with your environment variables
|
||||
# 2. Build: docker-compose -f docker-compose.production.yml build
|
||||
# 3. Run: docker-compose -f docker-compose.production.yml up -d
|
||||
# 4. Logs: docker-compose -f docker-compose.production.yml logs -f
|
||||
# 5. Stop: docker-compose -f docker-compose.production.yml down
|
||||
packages/frontend/src/app.d.ts · 1 line changed · vendored
@@ -8,6 +8,7 @@ declare global {
|
||||
// interface Error {}
|
||||
interface Locals {
|
||||
authStatus: AuthStatus;
|
||||
requestId: string;
|
||||
}
|
||||
// interface PageData {}
|
||||
// interface PageState {}
|
||||
|
||||
packages/frontend/src/hooks.server.ts
@@ -1,27 +1,97 @@
|
||||
import { isAuthenticated } from "$lib/services";
|
||||
import { logger, generateRequestId } from "$lib/logger";
|
||||
import type { Handle } from "@sveltejs/kit";
|
||||
|
||||
export async function handle({ event, resolve }) {
|
||||
const { cookies, locals } = event;
|
||||
// Log startup info once
|
||||
let hasLoggedStartup = false;
|
||||
if (!hasLoggedStartup) {
|
||||
logger.startup();
|
||||
hasLoggedStartup = true;
|
||||
}
|
||||
|
||||
export const handle: Handle = async ({ event, resolve }) => {
|
||||
const { cookies, locals, url, request } = event;
|
||||
const startTime = Date.now();
|
||||
|
||||
// Generate unique request ID
|
||||
const requestId = generateRequestId();
|
||||
|
||||
// Add request ID to locals for access in other handlers
|
||||
locals.requestId = requestId;
|
||||
|
||||
// Log incoming request
|
||||
logger.request(request.method, url.pathname, {
|
||||
requestId,
|
||||
context: {
|
||||
userAgent: request.headers.get('user-agent')?.substring(0, 100),
|
||||
referer: request.headers.get('referer'),
|
||||
ip: request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip'),
|
||||
},
|
||||
});
|
||||
|
||||
// Handle authentication
|
||||
const token = cookies.get("directus_session_token");
|
||||
|
||||
if (token) {
|
||||
locals.authStatus = await isAuthenticated(token);
|
||||
// if (locals.authStatus.authenticated) {
|
||||
// cookies.set('directus_refresh_token', locals.authStatus.data!.refresh_token!, {
|
||||
// httpOnly: true,
|
||||
// secure: true,
|
||||
// domain: '.pivoine.art',
|
||||
// path: '/'
|
||||
// })
|
||||
// }
|
||||
try {
|
||||
locals.authStatus = await isAuthenticated(token);
|
||||
|
||||
if (locals.authStatus.authenticated) {
|
||||
logger.auth('Token validated', true, {
|
||||
requestId,
|
||||
userId: locals.authStatus.user?.id,
|
||||
context: {
|
||||
email: locals.authStatus.user?.email,
|
||||
role: locals.authStatus.user?.role?.name,
|
||||
},
|
||||
});
|
||||
} else {
|
||||
logger.auth('Token invalid', false, { requestId });
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Authentication check failed', {
|
||||
requestId,
|
||||
error: error instanceof Error ? error : new Error(String(error)),
|
||||
});
|
||||
locals.authStatus = { authenticated: false };
|
||||
}
|
||||
} else {
|
||||
logger.debug('No session token found', { requestId });
|
||||
locals.authStatus = { authenticated: false };
|
||||
}
|
||||
|
||||
return await resolve(event, {
|
||||
filterSerializedResponseHeaders: (key) => {
|
||||
return key.toLowerCase() === "content-type";
|
||||
// Resolve the request
|
||||
let response: Response;
|
||||
try {
|
||||
response = await resolve(event, {
|
||||
filterSerializedResponseHeaders: (key) => {
|
||||
return key.toLowerCase() === "content-type";
|
||||
},
|
||||
});
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime;
|
||||
logger.error('Request handler error', {
|
||||
requestId,
|
||||
method: request.method,
|
||||
path: url.pathname,
|
||||
duration,
|
||||
error: error instanceof Error ? error : new Error(String(error)),
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Log response
|
||||
const duration = Date.now() - startTime;
|
||||
logger.response(request.method, url.pathname, response.status, duration, {
|
||||
requestId,
|
||||
userId: locals.authStatus.authenticated ? locals.authStatus.user?.id : undefined,
|
||||
context: {
|
||||
cached: response.headers.get('x-sveltekit-page') === 'true',
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Add request ID to response headers (useful for debugging)
|
||||
response.headers.set('x-request-id', requestId);
|
||||
|
||||
return response;
|
||||
};
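
Because `locals.requestId` is now declared on `App.Locals` (see the `app.d.ts` change above) and echoed back in the `x-request-id` response header, downstream server code can correlate its own log lines with the hook's request/response entries. A hypothetical load function, not part of this commit:

```typescript
// Hypothetical +page.server.ts: reuse the request ID set by the handle hook.
import { logger } from "$lib/logger";
import type { PageServerLoad } from "./$types";

export const load: PageServerLoad = async ({ locals }) => {
  logger.info("Loading page data", { requestId: locals.requestId });
  return { requestId: locals.requestId };
};
```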
|
||||
|
||||
packages/frontend/src/lib/logger.ts · 148 lines · new file
@@ -0,0 +1,148 @@
|
||||
/**
|
||||
* Server-side logging utility for sexy.pivoine.art
|
||||
* Provides structured logging with context and request tracing
|
||||
*/
|
||||
|
||||
export type LogLevel = 'debug' | 'info' | 'warn' | 'error';
|
||||
|
||||
interface LogContext {
|
||||
timestamp: string;
|
||||
level: LogLevel;
|
||||
message: string;
|
||||
context?: Record<string, unknown>;
|
||||
requestId?: string;
|
||||
userId?: string;
|
||||
path?: string;
|
||||
method?: string;
|
||||
duration?: number;
|
||||
error?: Error;
|
||||
}
|
||||
|
||||
class Logger {
|
||||
private isDev = process.env.NODE_ENV === 'development';
|
||||
private serviceName = 'sexy.pivoine.art';
|
||||
|
||||
private formatLog(ctx: LogContext): string {
|
||||
const { timestamp, level, message, context, requestId, userId, path, method, duration, error } = ctx;
|
||||
|
||||
const parts = [
|
||||
`[${timestamp}]`,
|
||||
`[${level.toUpperCase()}]`,
|
||||
requestId ? `[${requestId}]` : null,
|
||||
method && path ? `${method} ${path}` : null,
|
||||
message,
|
||||
userId ? `user=${userId}` : null,
|
||||
duration !== undefined ? `${duration}ms` : null,
|
||||
].filter(Boolean);
|
||||
|
||||
let logString = parts.join(' ');
|
||||
|
||||
if (context && Object.keys(context).length > 0) {
|
||||
logString += ' ' + JSON.stringify(context);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
logString += `\n Error: ${error.message}\n Stack: ${error.stack}`;
|
||||
}
|
||||
|
||||
return logString;
|
||||
}
|
||||
|
||||
private log(level: LogLevel, message: string, meta: Partial<LogContext> = {}) {
|
||||
const timestamp = new Date().toISOString();
|
||||
const logContext: LogContext = {
|
||||
timestamp,
|
||||
level,
|
||||
message,
|
||||
...meta,
|
||||
};
|
||||
|
||||
const formattedLog = this.formatLog(logContext);
|
||||
|
||||
switch (level) {
|
||||
case 'debug':
|
||||
if (this.isDev) console.debug(formattedLog);
|
||||
break;
|
||||
case 'info':
|
||||
console.info(formattedLog);
|
||||
break;
|
||||
case 'warn':
|
||||
console.warn(formattedLog);
|
||||
break;
|
||||
case 'error':
|
||||
console.error(formattedLog);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
debug(message: string, meta?: Partial<LogContext>) {
|
||||
this.log('debug', message, meta);
|
||||
}
|
||||
|
||||
info(message: string, meta?: Partial<LogContext>) {
|
||||
this.log('info', message, meta);
|
||||
}
|
||||
|
||||
warn(message: string, meta?: Partial<LogContext>) {
|
||||
this.log('warn', message, meta);
|
||||
}
|
||||
|
||||
error(message: string, meta?: Partial<LogContext>) {
|
||||
this.log('error', message, meta);
|
||||
}
|
||||
|
||||
// Request logging helper
|
||||
request(
|
||||
method: string,
|
||||
path: string,
|
||||
meta: Partial<LogContext> = {}
|
||||
) {
|
||||
this.info('→ Request received', { method, path, ...meta });
|
||||
}
|
||||
|
||||
response(
|
||||
method: string,
|
||||
path: string,
|
||||
status: number,
|
||||
duration: number,
|
||||
meta: Partial<LogContext> = {}
|
||||
) {
|
||||
const level = status >= 500 ? 'error' : status >= 400 ? 'warn' : 'info';
|
||||
this.log(level, `← Response ${status}`, { method, path, duration, ...meta });
|
||||
}
|
||||
|
||||
// Authentication logging
|
||||
auth(action: string, success: boolean, meta: Partial<LogContext> = {}) {
|
||||
this.info(`🔐 Auth: ${action} ${success ? 'success' : 'failed'}`, meta);
|
||||
}
|
||||
|
||||
// Startup logging
|
||||
startup() {
|
||||
const env = {
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
PUBLIC_API_URL: process.env.PUBLIC_API_URL,
|
||||
PUBLIC_URL: process.env.PUBLIC_URL,
|
||||
PUBLIC_UMAMI_ID: process.env.PUBLIC_UMAMI_ID ? '***set***' : 'not set',
|
||||
LETTERSPACE_API_URL: process.env.LETTERSPACE_API_URL || 'not set',
|
||||
PORT: process.env.PORT || '3000',
|
||||
HOST: process.env.HOST || '0.0.0.0',
|
||||
};
|
||||
|
||||
console.log('\n' + '='.repeat(60));
|
||||
console.log('🍑 sexy.pivoine.art - Server Starting 💜');
|
||||
console.log('='.repeat(60));
|
||||
console.log('\n📋 Environment Configuration:');
|
||||
Object.entries(env).forEach(([key, value]) => {
|
||||
console.log(` ${key}: ${value}`);
|
||||
});
|
||||
console.log('\n' + '='.repeat(60) + '\n');
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
export const logger = new Logger();
|
||||
|
||||
// Generate request ID
|
||||
export function generateRequestId(): string {
|
||||
return `req_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`;
|
||||
}
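
For reference, a typical call site and the kind of line `formatLog` produces for it (the sample output below is illustrative):

```typescript
import { logger, generateRequestId } from "$lib/logger";

const requestId = generateRequestId();

// Emits roughly:
// [2024-01-01T12:00:00.000Z] [INFO] [req_1704110400000_ab12cd34e] GET /videos → Request received {"referer":"https://sexy.pivoine.art/"}
logger.request("GET", "/videos", {
  requestId,
  context: { referer: "https://sexy.pivoine.art/" },
});

// Errors get the message and stack appended on their own lines:
logger.error("Upstream call failed", { requestId, error: new Error("timeout") });
```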
|
||||
packages/frontend/src/lib/services (imported as $lib/services)
@@ -18,6 +18,32 @@ import {
|
||||
} from "@directus/sdk";
|
||||
import type { Article, Model, Stats, User, Video } from "$lib/types";
|
||||
import { PUBLIC_URL } from "$env/static/public";
|
||||
import { logger } from "$lib/logger";
|
||||
|
||||
// Helper to log API calls
|
||||
async function loggedApiCall<T>(
|
||||
operationName: string,
|
||||
operation: () => Promise<T>,
|
||||
context?: Record<string, unknown>
|
||||
): Promise<T> {
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
logger.debug(`🔄 API: ${operationName}`, { context });
|
||||
const result = await operation();
|
||||
const duration = Date.now() - startTime;
|
||||
logger.info(`✅ API: ${operationName} succeeded`, { duration, context });
|
||||
return result;
|
||||
} catch (error) {
|
||||
const duration = Date.now() - startTime;
|
||||
logger.error(`❌ API: ${operationName} failed`, {
|
||||
duration,
|
||||
context,
|
||||
error: error instanceof Error ? error : new Error(String(error)),
|
||||
});
|
||||
throw error;
|
||||
}
|
||||
}
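
// Illustrative usage (not part of this diff): any new service function can be
// wrapped the same way — debug line on entry, info line with duration on
// success, error line with the Error object on failure. `getStats` and the
// "sexy_stats" collection below are hypothetical names used to show the pattern.
export async function getStats(fetchImpl?: typeof globalThis.fetch) {
  return loggedApiCall("getStats", async () => {
    const directus = getDirectusInstance(fetchImpl);
    return directus.request<Stats[]>(readItems("sexy_stats"));
  });
}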
|
||||
|
||||
const userFields = [
|
||||
"*",
|
||||
@@ -29,18 +55,24 @@ const userFields = [
|
||||
];
|
||||
|
||||
export async function isAuthenticated(token: string) {
|
||||
try {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
directus.setToken(token);
|
||||
const user = await directus.request(
|
||||
readMe({
|
||||
fields: userFields,
|
||||
}),
|
||||
);
|
||||
return { authenticated: true, user };
|
||||
} catch {
|
||||
return { authenticated: false };
|
||||
}
|
||||
return loggedApiCall(
|
||||
"isAuthenticated",
|
||||
async () => {
|
||||
try {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
directus.setToken(token);
|
||||
const user = await directus.request(
|
||||
readMe({
|
||||
fields: userFields,
|
||||
}),
|
||||
);
|
||||
return { authenticated: true, user };
|
||||
} catch {
|
||||
return { authenticated: false };
|
||||
}
|
||||
},
|
||||
{ hasToken: !!token },
|
||||
);
|
||||
}
|
||||
|
||||
export async function register(
|
||||
@@ -49,119 +81,167 @@ export async function register(
|
||||
firstName: string,
|
||||
lastName: string,
|
||||
) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(
|
||||
registerUser(email, password, {
|
||||
verification_url: `${PUBLIC_URL || "http://localhost:3000"}/signup/verify`,
|
||||
first_name: firstName,
|
||||
last_name: lastName,
|
||||
}),
|
||||
return loggedApiCall(
|
||||
"register",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(
|
||||
registerUser(email, password, {
|
||||
verification_url: `${PUBLIC_URL || "http://localhost:3000"}/signup/verify`,
|
||||
first_name: firstName,
|
||||
last_name: lastName,
|
||||
}),
|
||||
);
|
||||
},
|
||||
{ email, firstName, lastName },
|
||||
);
|
||||
}
|
||||
|
||||
export async function verify(token: string, fetch?: typeof globalThis.fetch) {
|
||||
const directus = fetch
|
||||
? getDirectusInstance((args) => fetch(args, { redirect: "manual" }))
|
||||
: getDirectusInstance(fetch);
|
||||
return directus.request(registerUserVerify(token));
|
||||
return loggedApiCall(
|
||||
"verify",
|
||||
async () => {
|
||||
const directus = fetch
|
||||
? getDirectusInstance((args) => fetch(args, { redirect: "manual" }))
|
||||
: getDirectusInstance(fetch);
|
||||
return directus.request(registerUserVerify(token));
|
||||
},
|
||||
{ hasToken: !!token },
|
||||
);
|
||||
}
|
||||
|
||||
export async function login(email: string, password: string) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.login({ email, password });
|
||||
return loggedApiCall(
|
||||
"login",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.login({ email, password });
|
||||
},
|
||||
{ email },
|
||||
);
|
||||
}
|
||||
|
||||
export async function logout() {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.logout();
|
||||
return loggedApiCall("logout", async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.logout();
|
||||
});
|
||||
}
|
||||
|
||||
export async function requestPassword(email: string) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(
|
||||
passwordRequest(email, `${PUBLIC_URL || "http://localhost:3000"}/password/reset`),
|
||||
return loggedApiCall(
|
||||
"requestPassword",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(
|
||||
passwordRequest(email, `${PUBLIC_URL || "http://localhost:3000"}/password/reset`),
|
||||
);
|
||||
},
|
||||
{ email },
|
||||
);
|
||||
}
|
||||
|
||||
export async function resetPassword(token: string, password: string) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(passwordReset(token, password));
|
||||
return loggedApiCall(
|
||||
"resetPassword",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request(passwordReset(token, password));
|
||||
},
|
||||
{ hasToken: !!token },
|
||||
);
|
||||
}
|
||||
|
||||
export async function getArticles(fetch?: typeof globalThis.fetch) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request<Article[]>(
|
||||
readItems("sexy_articles", {
|
||||
fields: ["*", "author.*"],
|
||||
where: { publish_date: { _lte: new Date().toISOString() } },
|
||||
sort: ["-publish_date"],
|
||||
}),
|
||||
);
|
||||
return loggedApiCall("getArticles", async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request<Article[]>(
|
||||
readItems("sexy_articles", {
|
||||
fields: ["*", "author.*"],
|
||||
where: { publish_date: { _lte: new Date().toISOString() } },
|
||||
sort: ["-publish_date"],
|
||||
}),
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export async function getArticleBySlug(
|
||||
slug: string,
|
||||
fetch?: typeof globalThis.fetch,
|
||||
) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus
|
||||
.request<Article[]>(
|
||||
readItems("sexy_articles", {
|
||||
fields: ["*", "author.*"],
|
||||
filter: { slug: { _eq: slug } },
|
||||
}),
|
||||
)
|
||||
.then((articles) => {
|
||||
if (articles.length === 0) {
|
||||
throw new Error("Article not found");
|
||||
}
|
||||
return articles[0];
|
||||
});
|
||||
return loggedApiCall(
|
||||
"getArticleBySlug",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus
|
||||
.request<Article[]>(
|
||||
readItems("sexy_articles", {
|
||||
fields: ["*", "author.*"],
|
||||
filter: { slug: { _eq: slug } },
|
||||
}),
|
||||
)
|
||||
.then((articles) => {
|
||||
if (articles.length === 0) {
|
||||
throw new Error("Article not found");
|
||||
}
|
||||
return articles[0];
|
||||
});
|
||||
},
|
||||
{ slug },
|
||||
);
|
||||
}
|
||||
|
||||
export async function getVideos(fetch?: typeof globalThis.fetch) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus
|
||||
.request<Video[]>(
|
||||
readItems("sexy_videos", {
|
||||
fields: [
|
||||
"*",
|
||||
{
|
||||
models: [
|
||||
"*",
|
||||
{
|
||||
directus_users_id: ["*"],
|
||||
},
|
||||
],
|
||||
},
|
||||
"movie.*",
|
||||
],
|
||||
filter: { upload_date: { _lte: new Date().toISOString() } },
|
||||
sort: ["-upload_date"],
|
||||
}),
|
||||
)
|
||||
.then((videos) => {
|
||||
videos.forEach((video) => {
|
||||
video.models = video.models.map((u) => u.directus_users_id!);
|
||||
return loggedApiCall("getVideos", async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus
|
||||
.request<Video[]>(
|
||||
readItems("sexy_videos", {
|
||||
fields: [
|
||||
"*",
|
||||
{
|
||||
models: [
|
||||
"*",
|
||||
{
|
||||
directus_users_id: ["*"],
|
||||
},
|
||||
],
|
||||
},
|
||||
"movie.*",
|
||||
],
|
||||
filter: { upload_date: { _lte: new Date().toISOString() } },
|
||||
sort: ["-upload_date"],
|
||||
}),
|
||||
)
|
||||
.then((videos) => {
|
||||
videos.forEach((video) => {
|
||||
video.models = video.models.map((u) => u.directus_users_id!);
|
||||
});
|
||||
return videos;
|
||||
});
|
||||
return videos;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export async function getVideosForModel(id, fetch?: typeof globalThis.fetch) {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request<Video[]>(
|
||||
readItems("sexy_videos", {
|
||||
fields: ["*", "movie.*"],
|
||||
filter: {
|
||||
models: {
|
||||
directus_users_id: {
|
||||
id,
|
||||
return loggedApiCall(
|
||||
"getVideosForModel",
|
||||
async () => {
|
||||
const directus = getDirectusInstance(fetch);
|
||||
return directus.request<Video[]>(
|
||||
readItems("sexy_videos", {
|
||||
fields: ["*", "movie.*"],
|
||||
filter: {
|
||||
models: {
|
||||
directus_users_id: {
|
||||
id,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
sort: ["-upload_date"],
|
||||
}),
|
||||
sort: ["-upload_date"],
|
||||
}),
|
||||
);
|
||||
},
|
||||
{ modelId: id },
|
||||
);
|
||||
}
|
||||
|
@@ -169,69 +249,81 @@ export async function getFeaturedVideos(
   limit: number,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus
-    .request<Video[]>(
-      readItems("sexy_videos", {
-        fields: [
-          "*",
-          {
-            models: [
-              "*",
-              {
-                directus_users_id: ["*"],
-              },
-            ],
-          },
-          "movie.*",
-        ],
-        filter: {
-          upload_date: { _lte: new Date().toISOString() },
-          featured: true,
-        },
-        sort: ["-upload_date"],
-        limit,
-      }),
-    )
-    .then((videos) => {
-      videos.forEach((video) => {
-        video.models = video.models.map((u) => u.directus_users_id!);
-      });
-      return videos;
-    });
+  return loggedApiCall(
+    "getFeaturedVideos",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus
+        .request<Video[]>(
+          readItems("sexy_videos", {
+            fields: [
+              "*",
+              {
+                models: [
+                  "*",
+                  {
+                    directus_users_id: ["*"],
+                  },
+                ],
+              },
+              "movie.*",
+            ],
+            filter: {
+              upload_date: { _lte: new Date().toISOString() },
+              featured: true,
+            },
+            sort: ["-upload_date"],
+            limit,
+          }),
+        )
+        .then((videos) => {
+          videos.forEach((video) => {
+            video.models = video.models.map((u) => u.directus_users_id!);
+          });
+          return videos;
+        });
+    },
+    { limit },
+  );
 }

 export async function getVideoBySlug(
   slug: string,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus
-    .request<Video[]>(
-      readItems("sexy_videos", {
-        fields: [
-          "*",
-          {
-            models: [
-              "*",
-              {
-                directus_users_id: ["*"],
-              },
-            ],
-          },
-          "movie.*",
-        ],
-        filter: { slug },
-      }),
-    )
-    .then((videos) => {
-      if (videos.length === 0) {
-        throw new Error("Video not found");
-      }
-      videos[0].models = videos[0].models.map((u) => u.directus_users_id!);
-
-      return videos[0];
-    });
+  return loggedApiCall(
+    "getVideoBySlug",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus
+        .request<Video[]>(
+          readItems("sexy_videos", {
+            fields: [
+              "*",
+              {
+                models: [
+                  "*",
+                  {
+                    directus_users_id: ["*"],
+                  },
+                ],
+              },
+              "movie.*",
+            ],
+            filter: { slug },
+          }),
+        )
+        .then((videos) => {
+          if (videos.length === 0) {
+            throw new Error("Video not found");
+          }
+          videos[0].models = videos[0].models.map((u) => u.directus_users_id!);
+
+          return videos[0];
+        });
+    },
+    { slug },
+  );
 }

 const modelFilter = {
@@ -256,28 +348,36 @@ const modelFilter = {
 };

 export async function getModels(fetch?: typeof globalThis.fetch) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request<Model[]>(
-    readUsers({
-      fields: ["*"],
-      filter: modelFilter,
-      sort: ["-join_date"],
-    }),
-  );
+  return loggedApiCall("getModels", async () => {
+    const directus = getDirectusInstance(fetch);
+    return directus.request<Model[]>(
+      readUsers({
+        fields: ["*"],
+        filter: modelFilter,
+        sort: ["-join_date"],
+      }),
+    );
+  });
 }

 export async function getFeaturedModels(
   limit = 3,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request<Model[]>(
-    readUsers({
-      fields: ["*"],
-      filter: { _and: [modelFilter, { featured: { _eq: true } }] },
-      sort: ["-join_date"],
-      limit,
-    }),
-  );
+  return loggedApiCall(
+    "getFeaturedModels",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus.request<Model[]>(
+        readUsers({
+          fields: ["*"],
+          filter: { _and: [modelFilter, { featured: { _eq: true } }] },
+          sort: ["-join_date"],
+          limit,
+        }),
+      );
+    },
+    { limit },
+  );
 }

@@ -285,71 +385,101 @@ export async function getModelBySlug(
   slug: string,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus
-    .request<Model[]>(
-      readUsers({
-        fields: [
-          "*",
-          {
-            photos: [
-              "*",
-              {
-                directus_files_id: ["*"],
-              },
-            ],
-          },
-          "banner.*",
-        ],
-        filter: { _and: [modelFilter, { slug: { _eq: slug } }] },
-      }),
-    )
-    .then((models) => {
-      if (models.length === 0) {
-        throw new Error("Model not found");
-      }
-      models[0].photos = models[0].photos.map((p) => p.directus_files_id!);
-      return models[0];
-    });
+  return loggedApiCall(
+    "getModelBySlug",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus
+        .request<Model[]>(
+          readUsers({
+            fields: [
+              "*",
+              {
+                photos: [
+                  "*",
+                  {
+                    directus_files_id: ["*"],
+                  },
+                ],
+              },
+              "banner.*",
+            ],
+            filter: { _and: [modelFilter, { slug: { _eq: slug } }] },
+          }),
+        )
+        .then((models) => {
+          if (models.length === 0) {
+            throw new Error("Model not found");
+          }
+          models[0].photos = models[0].photos.map((p) => p.directus_files_id!);
+          return models[0];
+        });
+    },
+    { slug },
+  );
 }

 export async function updateProfile(user: Partial<User>) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request<User>(updateMe(user as never));
+  return loggedApiCall(
+    "updateProfile",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus.request<User>(updateMe(user as never));
+    },
+    { userId: user.id },
+  );
 }

 export async function getStats(fetch?: typeof globalThis.fetch) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request<Stats>(
-    customEndpoint({
-      path: "/sexy/stats",
-    }),
-  );
+  return loggedApiCall("getStats", async () => {
+    const directus = getDirectusInstance(fetch);
+    return directus.request<Stats>(
+      customEndpoint({
+        path: "/sexy/stats",
+      }),
+    );
+  });
 }

 export async function getFolders(fetch?: typeof globalThis.fetch) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request(readFolders());
+  return loggedApiCall("getFolders", async () => {
+    const directus = getDirectusInstance(fetch);
+    return directus.request(readFolders());
+  });
 }

 export async function removeFile(id: string) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request(deleteFile(id));
+  return loggedApiCall(
+    "removeFile",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus.request(deleteFile(id));
+    },
+    { fileId: id },
+  );
 }

 export async function uploadFile(data: FormData) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request(uploadFiles(data));
+  return loggedApiCall("uploadFile", async () => {
+    const directus = getDirectusInstance(fetch);
+    return directus.request(uploadFiles(data));
+  });
 }

 export async function createCommentForVideo(item: string, comment: string) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request(
-    createComment({
-      collection: "sexy_videos",
-      item,
-      comment,
-    }),
-  );
+  return loggedApiCall(
+    "createCommentForVideo",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus.request(
+        createComment({
+          collection: "sexy_videos",
+          item,
+          comment,
+        }),
+      );
+    },
+    { videoId: item, commentLength: comment.length },
+  );
 }

@@ -357,13 +487,19 @@ export async function getCommentsForVideo(
   item: string,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus.request(
-    readComments({
-      fields: ["*", { user_created: ["*"] }],
-      filter: { collection: "sexy_videos", item },
-      sort: ["-date_created"],
-    }),
-  );
+  return loggedApiCall(
+    "getCommentsForVideo",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus.request(
+        readComments({
+          fields: ["*", { user_created: ["*"] }],
+          filter: { collection: "sexy_videos", item },
+          sort: ["-date_created"],
+        }),
+      );
+    },
+    { videoId: item },
+  );
 }

@@ -371,19 +507,25 @@ export async function countCommentsForModel(
   user_created: string,
   fetch?: typeof globalThis.fetch,
 ) {
-  const directus = getDirectusInstance(fetch);
-  return directus
-    .request<[{ count: number }]>(
-      aggregate("directus_comments", {
-        aggregate: {
-          count: "*",
-        },
-        query: {
-          filter: { user_created },
-        },
-      }),
-    )
-    .then((result) => result[0].count);
+  return loggedApiCall(
+    "countCommentsForModel",
+    async () => {
+      const directus = getDirectusInstance(fetch);
+      return directus
+        .request<[{ count: number }]>(
+          aggregate("directus_comments", {
+            aggregate: {
+              count: "*",
+            },
+            query: {
+              filter: { user_created },
+            },
+          }),
+        )
+        .then((result) => result[0].count);
+    },
+    { userId: user_created },
+  );
 }

 export async function getItemsByTag(
@@ -391,12 +533,18 @@ export async function getItemsByTag(
   tag: string,
   fetch?: typeof globalThis.fetch,
 ) {
-  switch (category) {
-    case "video":
-      return getVideos(fetch);
-    case "model":
-      return getModels(fetch);
-    case "article":
-      return getArticles(fetch);
-  }
+  return loggedApiCall(
+    "getItemsByTag",
+    async () => {
+      switch (category) {
+        case "video":
+          return getVideos(fetch);
+        case "model":
+          return getModels(fetch);
+        case "article":
+          return getArticles(fetch);
+      }
+    },
+    { category, tag },
+  );
 }

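The diff relies on a `loggedApiCall` helper whose definition is not part of this hunk. A minimal sketch of what such a wrapper might look like, assuming it only times the call and logs success or failure together with the context object passed as the third argument (the function name, `ApiContext` type, and log format below are assumptions, not the commit's actual code):

```typescript
// Hypothetical sketch of the loggedApiCall wrapper used above.
// The real implementation is defined elsewhere in the commit and may differ.
type ApiContext = Record<string, unknown>;

export async function loggedApiCall<T>(
  name: string,
  fn: () => Promise<T>,
  context: ApiContext = {},
): Promise<T> {
  const started = performance.now();
  try {
    // Run the wrapped Directus request and log how long it took.
    const result = await fn();
    console.debug(`[api] ${name} ok`, {
      ...context,
      durationMs: Math.round(performance.now() - started),
    });
    return result;
  } catch (error) {
    // Log the failure with the same context, then rethrow so callers still see the error.
    console.error(`[api] ${name} failed`, {
      ...context,
      durationMs: Math.round(performance.now() - started),
      error,
    });
    throw error;
  }
}
```

Under this reading, the third argument in the calls above (`{ slug }`, `{ limit }`, `{ videoId: item }`, and so on) surfaces as structured context on every log line, which is what the commit's "better logging" appears to aim for.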