Compare commits
90 Commits
buttplug-1
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| 798495c3d6 | |||
| fde0d63271 | |||
| 754a236e51 | |||
| dfe49b5882 | |||
| 9ba848372a | |||
| dcf2fbd3d4 | |||
| bff354094e | |||
| 6f2f3b3529 | |||
| f2871b98db | |||
| 9c5dba5c90 | |||
| c90c09da9a | |||
| aed7b4a16f | |||
| 454c477c40 | |||
| 3cf81bd381 | |||
| ac63e59906 | |||
| 19d29cbfc6 | |||
| 0ec27117ae | |||
| ed9eb6ef22 | |||
| 609f116b5d | |||
| e943876e70 | |||
| 7d373b3aa3 | |||
| 95fd9f48fc | |||
| 670c18bcb7 | |||
| 9ef490c1e5 | |||
| 434e926f77 | |||
| 7a9ce0c3b1 | |||
| ff1e1f6679 | |||
| 648123fab5 | |||
| a7fafaf7c5 | |||
| b71d7dc559 | |||
| f764e27d59 | |||
| d7eb2acc6c | |||
| fb38d6b9a9 | |||
| d021acaf0b | |||
| e06a1915f2 | |||
| ebab3405b1 | |||
| ad7ceee5f8 | |||
| c1770ab9c9 | |||
| b200498a10 | |||
| 1369d5c228 | |||
| e200514347 | |||
| d7057c3681 | |||
| d820a8f6be | |||
| 9bef2469d1 | |||
| 97269788ee | |||
| c6126c13e9 | |||
| fd4050a49f | |||
| efc7624ba3 | |||
| 18116072c9 | |||
| 741e0c3387 | |||
| 662e3e8fe2 | |||
| fa159feffa | |||
| 124f0bfb22 | |||
| df89cc59f5 | |||
| 845e3df223 | |||
| 05cb6a66e3 | |||
| 273aa42510 | |||
| 1e930baccb | |||
| 012bb176d9 | |||
| ed7ac0c573 | |||
| 4565038be3 | |||
| fbafbeca5d | |||
| 480369aa4e | |||
| ceb57ec1c4 | |||
| 4f8271217c | |||
| 046689e363 | |||
| 9ba71239b7 | |||
| 757bbe9e3b | |||
| 73f7a4f2f0 | |||
| 3bd8d95576 | |||
| 14e816241d | |||
| 4102f9990c | |||
| 2565e6c28b | |||
| 493ddd7e78 | |||
| 33dd076a50 | |||
| 9d7afbe1b5 | |||
| de16b64255 | |||
| 1e69d0b158 | |||
| 865787fb45 | |||
| 3915dbc115 | |||
| 83ca9d4fb5 | |||
| 225b9d41f5 | |||
| ad83fb553a | |||
| 2be36a679d | |||
| 75d4b4227c | |||
| 2277e4f686 | |||
| 13c6977e59 | |||
| c85fa7798e | |||
| ce30eca574 | |||
| 6724afa939 |
60
.gitea/workflows/docker-build-backend.yml
Normal file
60
.gitea/workflows/docker-build-backend.yml
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
name: Build and Push Backend Image
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- develop
|
||||||
|
tags:
|
||||||
|
- "v*.*.*"
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: dev.pivoine.art
|
||||||
|
IMAGE_NAME: valknar/sexy-backend
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64
|
||||||
|
|
||||||
|
- name: Log in to Gitea Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=raw,value=latest,enable={{is_default_branch}}
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=sha,prefix={{branch}}-
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile.backend
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: ${{ gitea.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache
|
||||||
|
cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache,mode=max
|
||||||
60
.gitea/workflows/docker-build-frontend.yml
Normal file
60
.gitea/workflows/docker-build-frontend.yml
Normal file
@@ -0,0 +1,60 @@
|
|||||||
|
name: Build and Push Frontend Image
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
- develop
|
||||||
|
tags:
|
||||||
|
- "v*.*.*"
|
||||||
|
pull_request:
|
||||||
|
branches:
|
||||||
|
- main
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
env:
|
||||||
|
REGISTRY: dev.pivoine.art
|
||||||
|
IMAGE_NAME: valknar/sexy
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- name: Checkout repository
|
||||||
|
uses: actions/checkout@v4
|
||||||
|
|
||||||
|
- name: Set up Docker Buildx
|
||||||
|
uses: docker/setup-buildx-action@v3
|
||||||
|
with:
|
||||||
|
platforms: linux/amd64
|
||||||
|
|
||||||
|
- name: Log in to Gitea Container Registry
|
||||||
|
uses: docker/login-action@v3
|
||||||
|
with:
|
||||||
|
registry: ${{ env.REGISTRY }}
|
||||||
|
username: ${{ gitea.actor }}
|
||||||
|
password: ${{ secrets.REGISTRY_TOKEN }}
|
||||||
|
|
||||||
|
- name: Extract metadata
|
||||||
|
id: meta
|
||||||
|
uses: docker/metadata-action@v5
|
||||||
|
with:
|
||||||
|
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||||
|
tags: |
|
||||||
|
type=raw,value=latest,enable={{is_default_branch}}
|
||||||
|
type=ref,event=branch
|
||||||
|
type=ref,event=pr
|
||||||
|
type=semver,pattern={{version}}
|
||||||
|
type=sha,prefix={{branch}}-
|
||||||
|
|
||||||
|
- name: Build and push
|
||||||
|
uses: docker/build-push-action@v5
|
||||||
|
with:
|
||||||
|
context: .
|
||||||
|
file: Dockerfile
|
||||||
|
platforms: linux/amd64
|
||||||
|
push: ${{ gitea.event_name != 'pull_request' }}
|
||||||
|
tags: ${{ steps.meta.outputs.tags }}
|
||||||
|
labels: ${{ steps.meta.outputs.labels }}
|
||||||
|
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache
|
||||||
|
cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache,mode=max
|
||||||
@@ -1,112 +0,0 @@
|
|||||||
name: Build and Push Docker Image to Gitea
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
- develop
|
|
||||||
tags:
|
|
||||||
- 'v*.*.*'
|
|
||||||
pull_request:
|
|
||||||
branches:
|
|
||||||
- main
|
|
||||||
workflow_dispatch:
|
|
||||||
inputs:
|
|
||||||
tag:
|
|
||||||
description: 'Custom tag for the image'
|
|
||||||
required: false
|
|
||||||
default: 'manual'
|
|
||||||
|
|
||||||
env:
|
|
||||||
REGISTRY: dev.pivoine.art
|
|
||||||
IMAGE_NAME: valknar/sexy
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
build-and-push:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- name: Checkout repository
|
|
||||||
uses: actions/checkout@v4
|
|
||||||
|
|
||||||
- name: Set up Docker Buildx
|
|
||||||
uses: docker/setup-buildx-action@v3
|
|
||||||
with:
|
|
||||||
platforms: linux/amd64
|
|
||||||
|
|
||||||
- name: Log in to Gitea Container Registry
|
|
||||||
uses: docker/login-action@v3
|
|
||||||
with:
|
|
||||||
registry: ${{ env.REGISTRY }}
|
|
||||||
username: ${{ gitea.actor }}
|
|
||||||
password: ${{ secrets.REGISTRY_TOKEN }}
|
|
||||||
|
|
||||||
- name: Extract metadata (tags, labels)
|
|
||||||
id: meta
|
|
||||||
uses: docker/metadata-action@v5
|
|
||||||
with:
|
|
||||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
|
||||||
tags: |
|
|
||||||
# Tag as 'latest' for main branch
|
|
||||||
type=raw,value=latest,enable={{is_default_branch}}
|
|
||||||
# Tag with branch name
|
|
||||||
type=ref,event=branch
|
|
||||||
# Tag with PR number
|
|
||||||
type=ref,event=pr
|
|
||||||
# Tag with git tag (semver)
|
|
||||||
type=semver,pattern={{version}}
|
|
||||||
type=semver,pattern={{major}}.{{minor}}
|
|
||||||
type=semver,pattern={{major}}
|
|
||||||
# Tag with commit SHA
|
|
||||||
type=sha,prefix={{branch}}-
|
|
||||||
# Custom tag from workflow_dispatch
|
|
||||||
type=raw,value=${{ gitea.event.inputs.tag }},enable=${{ gitea.event_name == 'workflow_dispatch' }}
|
|
||||||
labels: |
|
|
||||||
org.opencontainers.image.title=sexy.pivoine.art
|
|
||||||
org.opencontainers.image.description=Adult content platform with SvelteKit, Directus, and hardware integration
|
|
||||||
org.opencontainers.image.vendor=valknar
|
|
||||||
org.opencontainers.image.source=https://dev.pivoine.art/${{ gitea.repository }}
|
|
||||||
|
|
||||||
- name: Build and push Docker image
|
|
||||||
uses: docker/build-push-action@v5
|
|
||||||
with:
|
|
||||||
context: .
|
|
||||||
platforms: linux/amd64
|
|
||||||
push: ${{ gitea.event_name != 'pull_request' }}
|
|
||||||
tags: ${{ steps.meta.outputs.tags }}
|
|
||||||
labels: ${{ steps.meta.outputs.labels }}
|
|
||||||
cache-from: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache
|
|
||||||
cache-to: type=registry,ref=${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:buildcache,mode=max
|
|
||||||
build-args: |
|
|
||||||
NODE_ENV=production
|
|
||||||
CI=true
|
|
||||||
|
|
||||||
- name: Generate image digest
|
|
||||||
if: gitea.event_name != 'pull_request'
|
|
||||||
run: |
|
|
||||||
echo "### Docker Image Published :rocket:" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "**Registry:** \`${{ env.REGISTRY }}\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "**Image:** \`${{ env.IMAGE_NAME }}\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "**Tags:**" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "${{ steps.meta.outputs.tags }}" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "**Pull command:**" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`bash" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "docker pull ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}:latest" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
|
|
||||||
- name: PR Comment - Image built but not pushed
|
|
||||||
if: gitea.event_name == 'pull_request'
|
|
||||||
run: |
|
|
||||||
echo "### Docker Image Built Successfully :white_check_mark:" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "Image was built successfully but **not pushed** (PR builds are not published)." >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "**Would be tagged as:**" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "${{ steps.meta.outputs.tags }}" >> $GITEA_STEP_SUMMARY
|
|
||||||
echo "\`\`\`" >> $GITEA_STEP_SUMMARY
|
|
||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -3,5 +3,5 @@ dist/
|
|||||||
target/
|
target/
|
||||||
pkg/
|
pkg/
|
||||||
|
|
||||||
.env.*
|
.claude/
|
||||||
|
.data/
|
||||||
|
|||||||
6
.prettierignore
Normal file
6
.prettierignore
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
build/
|
||||||
|
.svelte-kit/
|
||||||
|
dist/
|
||||||
|
node_modules/
|
||||||
|
migrations/
|
||||||
|
pnpm-lock.yaml
|
||||||
241
CLAUDE.md
241
CLAUDE.md
@@ -2,176 +2,93 @@
|
|||||||
|
|
||||||
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
|
||||||
|
|
||||||
## Project Overview
|
## Overview
|
||||||
|
|
||||||
This is a monorepo for an adult content platform built with SvelteKit, Directus CMS, and hardware integration via Buttplug.io. The project uses pnpm workspaces with three main packages.
|
`sexy.pivoine.art` is a self-hosted adult content platform (18+) built as a pnpm monorepo with three packages: `frontend` (SvelteKit 5), `backend` (Fastify + GraphQL), and `buttplug` (hardware integration via WebBluetooth/WASM).
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
1. Install Node.js 20.19.1
|
|
||||||
2. Enable corepack: `corepack enable`
|
|
||||||
3. Install dependencies: `pnpm install`
|
|
||||||
4. Install Rust toolchain and wasm-bindgen: `cargo install wasm-bindgen-cli`
|
|
||||||
|
|
||||||
## Project Structure
|
|
||||||
|
|
||||||
### Packages
|
|
||||||
|
|
||||||
- **`packages/frontend`**: SvelteKit application (main frontend)
|
|
||||||
- **`packages/bundle`**: Directus extension bundle (custom endpoints, hooks, themes)
|
|
||||||
- **`packages/buttplug`**: Hardware control library with TypeScript/WebAssembly bindings
|
|
||||||
|
|
||||||
### Frontend (SvelteKit + Tailwind CSS 4)
|
|
||||||
|
|
||||||
- **Framework**: SvelteKit 2 with adapter-node
|
|
||||||
- **Styling**: Tailwind CSS v4 via @tailwindcss/vite
|
|
||||||
- **UI Components**: bits-ui, custom components in `src/lib/components/ui/`
|
|
||||||
- **Backend**: Directus headless CMS
|
|
||||||
- **Routes**: File-based routing in `src/routes/`
|
|
||||||
- `+page.server.ts`: Server-side data loading
|
|
||||||
- `+layout.server.ts`: Layout data (authentication, etc.)
|
|
||||||
- **Authentication**: Session-based via Directus SDK (cookies)
|
|
||||||
- **API Proxy**: Dev server proxies `/api` to `http://localhost:8055` (Directus)
|
|
||||||
- **i18n**: svelte-i18n for internationalization
|
|
||||||
|
|
||||||
Key files:
|
|
||||||
- `src/lib/directus.ts`: Directus client configuration
|
|
||||||
- `src/lib/types.ts`: Shared TypeScript types
|
|
||||||
- `src/hooks.server.ts`: Server-side auth middleware
|
|
||||||
- `vite.config.ts`: Dev server on port 3000 with API proxy
|
|
||||||
|
|
||||||
### Bundle (Directus Extensions)
|
|
||||||
|
|
||||||
Custom Directus extensions providing:
|
|
||||||
- **Endpoint** (`src/endpoint/index.ts`): `/sexy/stats` endpoint for platform statistics
|
|
||||||
- **Hook** (`src/hook/index.ts`):
|
|
||||||
- Auto-generates slugs for users based on artist_name
|
|
||||||
- Processes uploaded videos with ffmpeg to extract duration
|
|
||||||
- **Theme** (`src/theme/index.ts`): Custom Directus admin theme
|
|
||||||
|
|
||||||
### Buttplug (Hardware Control)
|
|
||||||
|
|
||||||
Hybrid TypeScript/Rust package for intimate hardware control:
|
|
||||||
- **TypeScript**: Client library, connectors (WebSocket, Browser WebSocket)
|
|
||||||
- **Rust/WASM**: Core buttplug implementation compiled to WebAssembly
|
|
||||||
- Provides browser-based Bluetooth device control via WebBluetooth API
|
|
||||||
|
|
||||||
Key concepts:
|
|
||||||
- `ButtplugClient`: Main client interface
|
|
||||||
- `ButtplugClientDevice`: Device abstraction
|
|
||||||
- `ButtplugWasmClientConnector`: WASM-based connector
|
|
||||||
- Messages defined in `src/core/Messages.ts`
|
|
||||||
|
|
||||||
## Common Commands
|
## Common Commands
|
||||||
|
|
||||||
### Development
|
Run from the repo root unless otherwise noted.
|
||||||
|
|
||||||
Start full development environment (data + Directus + frontend):
|
|
||||||
```bash
|
```bash
|
||||||
pnpm dev
|
# Development
|
||||||
|
pnpm dev:data # Start postgres & redis via Docker
|
||||||
|
pnpm dev:backend # Start backend on http://localhost:4000
|
||||||
|
pnpm dev # Start backend + frontend (frontend on :3000)
|
||||||
|
|
||||||
|
# Linting & Formatting
|
||||||
|
pnpm lint # ESLint across all packages
|
||||||
|
pnpm lint:fix # Auto-fix ESLint issues
|
||||||
|
pnpm format # Prettier format all files
|
||||||
|
pnpm format:check # Check formatting without changes
|
||||||
|
|
||||||
|
# Build
|
||||||
|
pnpm build:frontend # SvelteKit production build
|
||||||
|
pnpm build:backend # Compile backend TypeScript to dist/
|
||||||
|
|
||||||
|
# Database migrations (from packages/backend/)
|
||||||
|
pnpm migrate # Run pending Drizzle migrations
|
||||||
```
|
```
|
||||||
|
|
||||||
Individual services:
|
## Architecture
|
||||||
|
|
||||||
|
### Monorepo Layout
|
||||||
|
|
||||||
|
```
|
||||||
|
packages/
|
||||||
|
frontend/ # SvelteKit 2 + Svelte 5 + Tailwind CSS 4
|
||||||
|
backend/ # Fastify v5 + GraphQL Yoga v5 + Drizzle ORM
|
||||||
|
buttplug/ # TypeScript/Rust hybrid, compiles to WASM
|
||||||
|
```
|
||||||
|
|
||||||
|
### Backend (`packages/backend/src/`)
|
||||||
|
|
||||||
|
- **`index.ts`** — Fastify server entry: registers plugins (CORS, multipart, static), mounts GraphQL at `/graphql`, serves transformed assets at `/assets/:id`
|
||||||
|
- **`graphql/builder.ts`** — Pothos schema builder (code-first GraphQL)
|
||||||
|
- **`graphql/context.ts`** — Injects `currentUser` from Redis session into every request
|
||||||
|
- **`lib/auth.ts`** — Session management: `nanoid(32)` token stored in Redis with 24h TTL, set as httpOnly cookie
|
||||||
|
- **`db/schema/`** — Drizzle ORM table definitions (users, videos, files, comments, gamification, etc.)
|
||||||
|
- **`migrations/`** — SQL migration files managed by Drizzle Kit
|
||||||
|
|
||||||
|
### Frontend (`packages/frontend/src/`)
|
||||||
|
|
||||||
|
- **`lib/api.ts`** — GraphQL client (graphql-request)
|
||||||
|
- **`lib/services.ts`** — All API calls (login, videos, comments, models, etc.)
|
||||||
|
- **`lib/types.ts`** — Shared TypeScript types
|
||||||
|
- **`hooks.server.ts`** — Auth guard: reads session cookie, fetches `me` query, redirects if needed
|
||||||
|
- **`routes/`** — SvelteKit file-based routing: `/`, `/login`, `/signup`, `/me`, `/models`, `/models/[slug]`, `/videos`, `/play/[slug]`, `/magazine`, `/leaderboard`
|
||||||
|
|
||||||
|
### Asset Pipeline
|
||||||
|
|
||||||
|
Backend serves images with server-side Sharp transforms, cached to disk as WebP. Presets: `mini` (80×80), `thumbnail` (300×300), `preview` (800px wide), `medium` (1400px wide), `banner` (1600×480 cropped).
|
||||||
|
|
||||||
|
### Gamification
|
||||||
|
|
||||||
|
Points + achievements system tracked in `user_points` and `user_stats` tables. Logic in `packages/backend/src/lib/gamification.ts` and the `gamification` resolver.
|
||||||
|
|
||||||
|
## Code Style
|
||||||
|
|
||||||
|
- **TypeScript strict mode** in all packages
|
||||||
|
- **ESLint flat config** (`eslint.config.js` at root) — `any` is allowed but discouraged; enforces consistent type imports
|
||||||
|
- **Prettier**: 2-space indent, trailing commas, 100-char line width, Svelte plugin
|
||||||
|
- Migrations folder (`packages/backend/src/migrations/`) is excluded from lint
|
||||||
|
|
||||||
|
## Environment Variables (Backend)
|
||||||
|
|
||||||
|
| Variable | Purpose |
|
||||||
|
| --------------------------- | ---------------------------- |
|
||||||
|
| `DATABASE_URL` | PostgreSQL connection string |
|
||||||
|
| `REDIS_URL` | Redis connection string |
|
||||||
|
| `COOKIE_SECRET` | Session cookie signing |
|
||||||
|
| `CORS_ORIGIN` | Frontend origin URL |
|
||||||
|
| `UPLOAD_DIR` | File storage path |
|
||||||
|
| `SMTP_HOST/PORT/EMAIL_FROM` | Email (Nodemailer) |
|
||||||
|
|
||||||
|
## Docker
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
pnpm dev:data # Start Docker Compose data services
|
docker compose up -d # Start all services (postgres, redis, backend, frontend)
|
||||||
pnpm dev:directus # Start Directus in Docker
|
arty up -d <service> # Preferred way to manage containers in this project
|
||||||
pnpm --filter @sexy.pivoine.art/frontend dev # Frontend dev server only
|
|
||||||
```
|
```
|
||||||
|
|
||||||
### Building
|
Production images are built and pushed to `dev.pivoine.art` via Gitea Actions on push to `main`.
|
||||||
|
|
||||||
Build all packages:
|
|
||||||
```bash
|
|
||||||
pnpm install # Ensure dependencies are installed first
|
|
||||||
```
|
|
||||||
|
|
||||||
Build specific packages:
|
|
||||||
```bash
|
|
||||||
pnpm build:frontend # Pulls git, installs, builds frontend
|
|
||||||
pnpm build:bundle # Pulls git, installs, builds Directus extensions
|
|
||||||
```
|
|
||||||
|
|
||||||
Individual package builds:
|
|
||||||
```bash
|
|
||||||
pnpm --filter @sexy.pivoine.art/frontend build
|
|
||||||
pnpm --filter @sexy.pivoine.art/bundle build
|
|
||||||
pnpm --filter @sexy.pivoine.art/buttplug build # TypeScript build
|
|
||||||
pnpm --filter @sexy.pivoine.art/buttplug build:wasm # Rust WASM build
|
|
||||||
```
|
|
||||||
|
|
||||||
### Production
|
|
||||||
|
|
||||||
Start production frontend server (local):
|
|
||||||
```bash
|
|
||||||
pnpm --filter @sexy.pivoine.art/frontend start
|
|
||||||
```
|
|
||||||
|
|
||||||
Docker Compose deployment (recommended for production):
|
|
||||||
```bash
|
|
||||||
# Local development (with Postgres, Redis, Directus)
|
|
||||||
docker-compose up -d
|
|
||||||
|
|
||||||
# Production (with Traefik, external DB, Redis)
|
|
||||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
See `COMPOSE.md` for Docker Compose guide and `DOCKER.md` for standalone Docker deployment.
|
|
||||||
|
|
||||||
## Architecture Notes
|
|
||||||
|
|
||||||
### Data Flow
|
|
||||||
|
|
||||||
1. **Frontend** → `/api/*` (proxied) → **Directus CMS**
|
|
||||||
2. Directus uses **bundle extensions** for custom logic (stats, video processing, user management)
|
|
||||||
3. Frontend uses **Directus SDK** with session authentication
|
|
||||||
4. Hardware control uses **buttplug package** (TypeScript → WASM → Bluetooth)
|
|
||||||
|
|
||||||
### Authentication
|
|
||||||
|
|
||||||
- Session tokens stored in `directus_session_token` cookie
|
|
||||||
- `hooks.server.ts` validates token on every request via `isAuthenticated()`
|
|
||||||
- User roles: Model, Viewer (checked via role or policy)
|
|
||||||
- `isModel()` helper in `src/lib/directus.ts` checks user permissions
|
|
||||||
|
|
||||||
### Content Types
|
|
||||||
|
|
||||||
Core types in `packages/frontend/src/lib/types.ts`:
|
|
||||||
- **User/CurrentUser**: User profiles with roles and policies
|
|
||||||
- **Video**: Videos with models, tags, premium flag
|
|
||||||
- **Model**: Creator profiles with photos and banner
|
|
||||||
- **Article**: Magazine/blog content
|
|
||||||
- **BluetoothDevice**: Hardware device state
|
|
||||||
|
|
||||||
### Docker Environment
|
|
||||||
|
|
||||||
Development uses Docker Compose in `../compose/` directory:
|
|
||||||
- `../compose/data`: Database/storage services
|
|
||||||
- `../compose/sexy`: Directus instance (uses `.env.local`)
|
|
||||||
|
|
||||||
### Asset URLs
|
|
||||||
|
|
||||||
Assets served via Directus with transforms:
|
|
||||||
```typescript
|
|
||||||
getAssetUrl(id, "thumbnail" | "preview" | "medium" | "banner")
|
|
||||||
// Returns: ${directusApiUrl}/assets/${id}?transform=...
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development Workflow
|
|
||||||
|
|
||||||
1. Ensure Docker services are running: `pnpm dev:data && pnpm dev:directus`
|
|
||||||
2. Start frontend dev server: `pnpm --filter @sexy.pivoine.art/frontend dev`
|
|
||||||
3. Access frontend at `http://localhost:3000`
|
|
||||||
4. Access Directus admin at `http://localhost:8055`
|
|
||||||
|
|
||||||
When modifying:
|
|
||||||
- **Frontend code**: Hot reload via Vite
|
|
||||||
- **Bundle extensions**: Rebuild with `pnpm --filter @sexy.pivoine.art/bundle build` and restart Directus
|
|
||||||
- **Buttplug library**: Rebuild TypeScript (`pnpm build`) and/or WASM (`pnpm build:wasm`)
|
|
||||||
|
|
||||||
## Important Notes
|
|
||||||
|
|
||||||
- This is a pnpm workspace; always use `pnpm` not `npm` or `yarn`
|
|
||||||
- Package manager is locked to `pnpm@10.17.0`
|
|
||||||
- Buttplug package requires Rust toolchain for WASM builds
|
|
||||||
- Frontend uses SvelteKit's adapter-node for production deployment
|
|
||||||
- All TypeScript packages use ES modules (`"type": "module"`)
|
|
||||||
|
|||||||
424
COMPOSE.md
424
COMPOSE.md
@@ -1,424 +0,0 @@
|
|||||||
# Docker Compose Guide
|
|
||||||
|
|
||||||
This guide explains the Docker Compose setup for sexy.pivoine.art with local development and production configurations.
|
|
||||||
|
|
||||||
## Architecture Overview
|
|
||||||
|
|
||||||
The application uses a **multi-file compose setup** with two configurations:
|
|
||||||
|
|
||||||
1. **`compose.yml`** - Base configuration for local development
|
|
||||||
2. **`compose.production.yml`** - Production overrides with Traefik integration
|
|
||||||
|
|
||||||
### Service Architecture
|
|
||||||
|
|
||||||
```
|
|
||||||
┌─────────────────────────────────────────────────────────────┐
|
|
||||||
│ 🌐 Traefik Reverse Proxy (Production Only) │
|
|
||||||
│ ├─ HTTPS Termination │
|
|
||||||
│ ├─ Automatic Let's Encrypt │
|
|
||||||
│ └─ Routes traffic to frontend & Directus API │
|
|
||||||
├─────────────────────────────────────────────────────────────┤
|
|
||||||
│ 💄 Frontend (SvelteKit) │
|
|
||||||
│ ├─ Port 3000 (internal) │
|
|
||||||
│ ├─ Serves on https://sexy.pivoine.art │
|
|
||||||
│ └─ Proxies /api to Directus │
|
|
||||||
├─────────────────────────────────────────────────────────────┤
|
|
||||||
│ 🎭 Directus CMS │
|
|
||||||
│ ├─ Port 8055 (internal) │
|
|
||||||
│ ├─ Serves on https://sexy.pivoine.art/api │
|
|
||||||
│ ├─ Custom bundle extensions mounted │
|
|
||||||
│ └─ Uploads volume │
|
|
||||||
├─────────────────────────────────────────────────────────────┤
|
|
||||||
│ 🗄️ PostgreSQL (Local) / External (Production) │
|
|
||||||
│ └─ Database for Directus │
|
|
||||||
├─────────────────────────────────────────────────────────────┤
|
|
||||||
│ 💾 Redis (Local) / External (Production) │
|
|
||||||
│ └─ Cache & session storage │
|
|
||||||
└─────────────────────────────────────────────────────────────┘
|
|
||||||
```
|
|
||||||
|
|
||||||
## Local Development Setup
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
|
|
||||||
- Docker 20.10+
|
|
||||||
- Docker Compose 2.0+
|
|
||||||
|
|
||||||
### Quick Start
|
|
||||||
|
|
||||||
1. **Create environment file:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp .env.example .env
|
|
||||||
# Edit .env with your local settings (defaults work fine)
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Start all services:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Access services:**
|
|
||||||
- Frontend: http://localhost:3000 (if enabled)
|
|
||||||
- Directus: http://localhost:8055
|
|
||||||
- Directus Admin: http://localhost:8055/admin
|
|
||||||
|
|
||||||
4. **View logs:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose logs -f
|
|
||||||
```
|
|
||||||
|
|
||||||
5. **Stop services:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose down
|
|
||||||
```
|
|
||||||
|
|
||||||
### Local Services
|
|
||||||
|
|
||||||
#### PostgreSQL
|
|
||||||
- **Image:** `postgres:16-alpine`
|
|
||||||
- **Port:** 5432 (internal only)
|
|
||||||
- **Volume:** `postgres-data`
|
|
||||||
- **Database:** `sexy`
|
|
||||||
|
|
||||||
#### Redis
|
|
||||||
- **Image:** `redis:7-alpine`
|
|
||||||
- **Port:** 6379 (internal only)
|
|
||||||
- **Volume:** `redis-data`
|
|
||||||
- **Persistence:** AOF enabled
|
|
||||||
|
|
||||||
#### Directus
|
|
||||||
- **Image:** `directus/directus:11`
|
|
||||||
- **Port:** 8055 (exposed)
|
|
||||||
- **Volumes:**
|
|
||||||
- `directus-uploads` - File uploads
|
|
||||||
- `./packages/bundle/dist` - Custom extensions
|
|
||||||
- **Features:**
|
|
||||||
- Auto-reload extensions
|
|
||||||
- WebSockets enabled
|
|
||||||
- CORS enabled for localhost
|
|
||||||
|
|
||||||
### Local Development Workflow
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Start infrastructure (Postgres, Redis, Directus)
|
|
||||||
docker-compose up -d
|
|
||||||
|
|
||||||
# Develop frontend locally with hot reload
|
|
||||||
cd packages/frontend
|
|
||||||
pnpm dev
|
|
||||||
|
|
||||||
# Build Directus bundle
|
|
||||||
pnpm --filter @sexy.pivoine.art/bundle build
|
|
||||||
|
|
||||||
# Restart Directus to load new bundle
|
|
||||||
docker-compose restart directus
|
|
||||||
```
|
|
||||||
|
|
||||||
## Production Deployment
|
|
||||||
|
|
||||||
### Prerequisites
|
|
||||||
|
|
||||||
- External PostgreSQL database
|
|
||||||
- External Redis instance
|
|
||||||
- Traefik reverse proxy configured
|
|
||||||
- External network: `compose_network`
|
|
||||||
|
|
||||||
### Setup
|
|
||||||
|
|
||||||
The production compose file now uses the `include` directive to automatically extend `compose.yml`, making deployment simpler.
|
|
||||||
|
|
||||||
1. **Create production environment file:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp .env.production.example .env.production
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Edit `.env.production` with your values:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Database (external)
|
|
||||||
CORE_DB_HOST=your-postgres-host
|
|
||||||
SEXY_DB_NAME=sexy_production
|
|
||||||
DB_USER=sexy
|
|
||||||
DB_PASSWORD=your-secure-password
|
|
||||||
|
|
||||||
# Redis (external)
|
|
||||||
CORE_REDIS_HOST=your-redis-host
|
|
||||||
|
|
||||||
# Directus
|
|
||||||
SEXY_DIRECTUS_SECRET=your-32-char-random-secret
|
|
||||||
ADMIN_PASSWORD=your-secure-admin-password
|
|
||||||
|
|
||||||
# Traefik
|
|
||||||
SEXY_TRAEFIK_HOST=sexy.pivoine.art
|
|
||||||
|
|
||||||
# Frontend
|
|
||||||
PUBLIC_API_URL=https://sexy.pivoine.art/api
|
|
||||||
PUBLIC_URL=https://sexy.pivoine.art
|
|
||||||
|
|
||||||
# Email (SMTP)
|
|
||||||
EMAIL_SMTP_HOST=smtp.your-provider.com
|
|
||||||
EMAIL_SMTP_USER=your-email@domain.com
|
|
||||||
EMAIL_SMTP_PASSWORD=your-smtp-password
|
|
||||||
```
|
|
||||||
|
|
||||||
3. **Deploy:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Simple deployment - compose.production.yml includes compose.yml automatically
|
|
||||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
|
||||||
|
|
||||||
# Or use the traditional multi-file approach (same result)
|
|
||||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### Production Services
|
|
||||||
|
|
||||||
#### Directus
|
|
||||||
- **Image:** `directus/directus:11` (configurable)
|
|
||||||
- **Network:** `compose_network` (external)
|
|
||||||
- **Volumes:**
|
|
||||||
- `/var/www/sexy.pivoine.art/uploads` - Persistent uploads
|
|
||||||
- `/var/www/sexy.pivoine.art/packages/bundle/dist` - Extensions
|
|
||||||
- **Traefik routing:**
|
|
||||||
- Domain: `sexy.pivoine.art/api`
|
|
||||||
- Strips `/api` prefix before forwarding
|
|
||||||
- HTTPS with auto-certificates
|
|
||||||
|
|
||||||
#### Frontend
|
|
||||||
- **Image:** `ghcr.io/valknarxxx/sexy:latest` (from GHCR)
|
|
||||||
- **Network:** `compose_network` (external)
|
|
||||||
- **Volume:** `/var/www/sexy.pivoine.art` - Application code
|
|
||||||
- **Traefik routing:**
|
|
||||||
- Domain: `sexy.pivoine.art`
|
|
||||||
- HTTPS with auto-certificates
|
|
||||||
|
|
||||||
### Traefik Integration
|
|
||||||
|
|
||||||
Both services are configured with Traefik labels for automatic routing:
|
|
||||||
|
|
||||||
**Frontend:**
|
|
||||||
- HTTP → HTTPS redirect
|
|
||||||
- Routes `sexy.pivoine.art` to port 3000
|
|
||||||
- Gzip compression enabled
|
|
||||||
|
|
||||||
**Directus API:**
|
|
||||||
- HTTP → HTTPS redirect
|
|
||||||
- Routes `sexy.pivoine.art/api` to port 8055
|
|
||||||
- Strips `/api` prefix
|
|
||||||
- Gzip compression enabled
|
|
||||||
|
|
||||||
### Production Commands
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Deploy/update (simplified - uses include)
|
|
||||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
|
||||||
|
|
||||||
# View logs
|
|
||||||
docker-compose -f compose.production.yml logs -f
|
|
||||||
|
|
||||||
# Restart specific service
|
|
||||||
docker-compose -f compose.production.yml restart frontend
|
|
||||||
|
|
||||||
# Stop all services
|
|
||||||
docker-compose -f compose.production.yml down
|
|
||||||
|
|
||||||
# Update images
|
|
||||||
docker-compose -f compose.production.yml pull
|
|
||||||
docker-compose -f compose.production.yml up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
## Environment Variables
|
|
||||||
|
|
||||||
### Local Development (`.env`)
|
|
||||||
|
|
||||||
| Variable | Default | Description |
|
|
||||||
|----------|---------|-------------|
|
|
||||||
| `DB_DATABASE` | `sexy` | Database name |
|
|
||||||
| `DB_USER` | `sexy` | Database user |
|
|
||||||
| `DB_PASSWORD` | `sexy` | Database password |
|
|
||||||
| `DIRECTUS_SECRET` | - | Secret for Directus (min 32 chars) |
|
|
||||||
| `ADMIN_EMAIL` | `admin@sexy.pivoine.art` | Admin email |
|
|
||||||
| `ADMIN_PASSWORD` | `admin` | Admin password |
|
|
||||||
| `CORS_ORIGIN` | `http://localhost:3000` | CORS allowed origins |
|
|
||||||
|
|
||||||
See `.env.example` for full list.
|
|
||||||
|
|
||||||
### Production (`.env.production`)
|
|
||||||
|
|
||||||
| Variable | Description | Required |
|
|
||||||
|----------|-------------|----------|
|
|
||||||
| `CORE_DB_HOST` | External PostgreSQL host | ✅ |
|
|
||||||
| `SEXY_DB_NAME` | Database name | ✅ |
|
|
||||||
| `DB_PASSWORD` | Database password | ✅ |
|
|
||||||
| `CORE_REDIS_HOST` | External Redis host | ✅ |
|
|
||||||
| `SEXY_DIRECTUS_SECRET` | Directus secret key | ✅ |
|
|
||||||
| `SEXY_TRAEFIK_HOST` | Domain name | ✅ |
|
|
||||||
| `EMAIL_SMTP_HOST` | SMTP server | ✅ |
|
|
||||||
| `EMAIL_SMTP_PASSWORD` | SMTP password | ✅ |
|
|
||||||
| `SEXY_FRONTEND_PUBLIC_API_URL` | Frontend API URL | ✅ |
|
|
||||||
| `SEXY_FRONTEND_PUBLIC_URL` | Frontend public URL | ✅ |
|
|
||||||
|
|
||||||
See `.env.production.example` for full list.
|
|
||||||
|
|
||||||
**Note:** All frontend-specific variables are prefixed with `SEXY_FRONTEND_` for clarity.
|
|
||||||
|
|
||||||
## Volumes
|
|
||||||
|
|
||||||
### Local Development
|
|
||||||
|
|
||||||
- `postgres-data` - PostgreSQL database
|
|
||||||
- `redis-data` - Redis persistence
|
|
||||||
- `directus-uploads` - Uploaded files
|
|
||||||
|
|
||||||
### Production
|
|
||||||
|
|
||||||
- `/var/www/sexy.pivoine.art/uploads` - Directus uploads
|
|
||||||
- `/var/www/sexy.pivoine.art` - Application code (frontend)
|
|
||||||
|
|
||||||
## Networks
|
|
||||||
|
|
||||||
### Local: `sexy-network`
|
|
||||||
- Bridge network
|
|
||||||
- Internal communication only
|
|
||||||
- Directus exposed on 8055
|
|
||||||
|
|
||||||
### Production: `compose_network`
|
|
||||||
- External network (pre-existing)
|
|
||||||
- Connects to Traefik
|
|
||||||
- No exposed ports (Traefik handles routing)
|
|
||||||
|
|
||||||
## Health Checks
|
|
||||||
|
|
||||||
All services include health checks:
|
|
||||||
|
|
||||||
**PostgreSQL:**
|
|
||||||
- Command: `pg_isready`
|
|
||||||
- Interval: 10s
|
|
||||||
|
|
||||||
**Redis:**
|
|
||||||
- Command: `redis-cli ping`
|
|
||||||
- Interval: 10s
|
|
||||||
|
|
||||||
**Directus:**
|
|
||||||
- Endpoint: `/server/health`
|
|
||||||
- Interval: 30s
|
|
||||||
|
|
||||||
**Frontend:**
|
|
||||||
- HTTP GET: `localhost:3000`
|
|
||||||
- Interval: 30s
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Local Development
|
|
||||||
|
|
||||||
**Problem:** Directus won't start
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check logs
|
|
||||||
docker-compose logs directus
|
|
||||||
|
|
||||||
# Common issues:
|
|
||||||
# 1. Database not ready - wait for postgres to be healthy
|
|
||||||
# 2. Wrong secret - check DIRECTUS_SECRET is at least 32 chars
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Can't connect to database
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check if postgres is running
|
|
||||||
docker-compose ps postgres
|
|
||||||
|
|
||||||
# Verify health
|
|
||||||
docker-compose exec postgres pg_isready -U sexy
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Extensions not loading
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Rebuild bundle
|
|
||||||
pnpm --filter @sexy.pivoine.art/bundle build
|
|
||||||
|
|
||||||
# Verify volume mount
|
|
||||||
docker-compose exec directus ls -la /directus/extensions/
|
|
||||||
|
|
||||||
# Restart Directus
|
|
||||||
docker-compose restart directus
|
|
||||||
```
|
|
||||||
|
|
||||||
### Production
|
|
||||||
|
|
||||||
**Problem:** Services not accessible via domain
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check Traefik labels
|
|
||||||
docker inspect sexy_frontend | grep traefik
|
|
||||||
|
|
||||||
# Verify compose_network exists
|
|
||||||
docker network ls | grep compose_network
|
|
||||||
|
|
||||||
# Check Traefik is running
|
|
||||||
docker ps | grep traefik
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Can't connect to external database
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Test connection from Directus container
|
|
||||||
docker-compose exec directus sh
|
|
||||||
apk add postgresql-client
|
|
||||||
psql -h $CORE_DB_HOST -U $DB_USER -d $SEXY_DB_NAME
|
|
||||||
```
|
|
||||||
|
|
||||||
**Problem:** Frontend can't reach Directus API
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check Directus is accessible
|
|
||||||
curl https://sexy.pivoine.art/api/server/health
|
|
||||||
|
|
||||||
# Verify CORS settings
|
|
||||||
# PUBLIC_API_URL should match the public Directus URL
|
|
||||||
```
|
|
||||||
|
|
||||||
## Migration from Old Setup
|
|
||||||
|
|
||||||
If migrating from `docker-compose.production.yml`:
|
|
||||||
|
|
||||||
1. **Rename environment variables** according to `.env.production.example`
|
|
||||||
2. **Update command** to use both compose files
|
|
||||||
3. **Verify Traefik labels** match your setup
|
|
||||||
4. **Test** with `docker-compose config` to see merged configuration
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Validate configuration
|
|
||||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production config
|
|
||||||
|
|
||||||
# Deploy
|
|
||||||
docker-compose -f compose.yml -f compose.production.yml --env-file .env.production up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
## Best Practices
|
|
||||||
|
|
||||||
### Local Development
|
|
||||||
1. Use default credentials (they're fine for local)
|
|
||||||
2. Keep `EXTENSIONS_AUTO_RELOAD=true` for quick iteration
|
|
||||||
3. Run frontend via `pnpm dev` for hot reload
|
|
||||||
4. Restart Directus after bundle changes
|
|
||||||
|
|
||||||
### Production
|
|
||||||
1. Use strong passwords for database and admin
|
|
||||||
2. Set `EXTENSIONS_AUTO_RELOAD=false` for stability
|
|
||||||
3. Use GHCR images for frontend
|
|
||||||
4. Enable Gzip compression via Traefik
|
|
||||||
5. Monitor logs regularly
|
|
||||||
6. Keep backups of uploads and database
|
|
||||||
|
|
||||||
## See Also
|
|
||||||
|
|
||||||
- [DOCKER.md](DOCKER.md) - Docker image documentation
|
|
||||||
- [QUICKSTART.md](QUICKSTART.md) - Quick start guide
|
|
||||||
- [CLAUDE.md](CLAUDE.md) - Development guide
|
|
||||||
378
DOCKER.md
378
DOCKER.md
@@ -1,378 +0,0 @@
|
|||||||
# Docker Deployment Guide
|
|
||||||
|
|
||||||
This guide covers building and deploying sexy.pivoine.art using Docker.
|
|
||||||
|
|
||||||
## Overview
|
|
||||||
|
|
||||||
The Dockerfile uses a multi-stage build process:
|
|
||||||
|
|
||||||
1. **Base stage**: Sets up Node.js and pnpm
|
|
||||||
2. **Builder stage**: Installs Rust, compiles WASM, builds all packages
|
|
||||||
3. **Runner stage**: Minimal production image with only runtime dependencies
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
- Docker 20.10+ with BuildKit support
|
|
||||||
- Docker Compose 2.0+ (optional, for orchestration)
|
|
||||||
|
|
||||||
## Building the Image
|
|
||||||
|
|
||||||
### Basic Build
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker build -t sexy.pivoine.art:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Build with Build Arguments
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker build \
|
|
||||||
--build-arg NODE_ENV=production \
|
|
||||||
-t sexy.pivoine.art:latest \
|
|
||||||
.
|
|
||||||
```
|
|
||||||
|
|
||||||
### Multi-platform Build (for ARM64 and AMD64)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker buildx build \
|
|
||||||
--platform linux/amd64,linux/arm64 \
|
|
||||||
-t sexy.pivoine.art:latest \
|
|
||||||
--push \
|
|
||||||
.
|
|
||||||
```
|
|
||||||
|
|
||||||
## Running the Container
|
|
||||||
|
|
||||||
### Run with Environment Variables
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine-frontend \
|
|
||||||
-p 3000:3000 \
|
|
||||||
-e PUBLIC_API_URL=https://api.pivoine.art \
|
|
||||||
-e PUBLIC_URL=https://sexy.pivoine.art \
|
|
||||||
-e PUBLIC_UMAMI_ID=your-umami-id \
|
|
||||||
-e LETTERSPACE_API_URL=https://api.letterspace.com/v1 \
|
|
||||||
-e LETTERSPACE_API_KEY=your-api-key \
|
|
||||||
-e LETTERSPACE_LIST_ID=your-list-id \
|
|
||||||
sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### Run with Environment File
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create .env.production from template
|
|
||||||
cp .env.production.example .env.production
|
|
||||||
|
|
||||||
# Edit .env.production with your values
|
|
||||||
nano .env.production
|
|
||||||
|
|
||||||
# Run container
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine-frontend \
|
|
||||||
-p 3000:3000 \
|
|
||||||
--env-file .env.production \
|
|
||||||
sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Docker Compose Deployment
|
|
||||||
|
|
||||||
### Using docker-compose.production.yml
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# 1. Create environment file
|
|
||||||
cp .env.production.example .env.production
|
|
||||||
|
|
||||||
# 2. Edit environment variables
|
|
||||||
nano .env.production
|
|
||||||
|
|
||||||
# 3. Build and start
|
|
||||||
docker-compose -f docker-compose.production.yml up -d --build
|
|
||||||
|
|
||||||
# 4. View logs
|
|
||||||
docker-compose -f docker-compose.production.yml logs -f frontend
|
|
||||||
|
|
||||||
# 5. Stop services
|
|
||||||
docker-compose -f docker-compose.production.yml down
|
|
||||||
```
|
|
||||||
|
|
||||||
### Scale the Application
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose -f docker-compose.production.yml up -d --scale frontend=3
|
|
||||||
```
|
|
||||||
|
|
||||||
## Environment Variables Reference
|
|
||||||
|
|
||||||
### Required Variables
|
|
||||||
|
|
||||||
| Variable | Description | Example |
|
|
||||||
|----------|-------------|---------|
|
|
||||||
| `PUBLIC_API_URL` | Directus API backend URL | `https://api.pivoine.art` |
|
|
||||||
| `PUBLIC_URL` | Frontend application URL | `https://sexy.pivoine.art` |
|
|
||||||
|
|
||||||
### Optional Variables
|
|
||||||
|
|
||||||
| Variable | Description | Example |
|
|
||||||
|----------|-------------|---------|
|
|
||||||
| `PUBLIC_UMAMI_ID` | Umami analytics tracking ID | `abc123def-456` |
|
|
||||||
| `LETTERSPACE_API_URL` | Letterspace API endpoint | `https://api.letterspace.com/v1` |
|
|
||||||
| `LETTERSPACE_API_KEY` | Letterspace authentication key | `sk_live_...` |
|
|
||||||
| `LETTERSPACE_LIST_ID` | Mailing list identifier | `list_abc123` |
|
|
||||||
| `PORT` | Application port (inside container) | `3000` |
|
|
||||||
| `HOST` | Host binding | `0.0.0.0` |
|
|
||||||
| `NODE_ENV` | Node environment | `production` |
|
|
||||||
|
|
||||||
## Health Checks
|
|
||||||
|
|
||||||
The container includes a built-in health check that pings the HTTP server every 30 seconds:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check container health
|
|
||||||
docker inspect --format='{{.State.Health.Status}}' sexy-pivoine-frontend
|
|
||||||
|
|
||||||
# View health check logs
|
|
||||||
docker inspect --format='{{json .State.Health}}' sexy-pivoine-frontend | jq
|
|
||||||
```
|
|
||||||
|
|
||||||
## Logs and Debugging
|
|
||||||
|
|
||||||
### View Container Logs
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Follow logs
|
|
||||||
docker logs -f sexy-pivoine-frontend
|
|
||||||
|
|
||||||
# Last 100 lines
|
|
||||||
docker logs --tail 100 sexy-pivoine-frontend
|
|
||||||
|
|
||||||
# With timestamps
|
|
||||||
docker logs -f --timestamps sexy-pivoine-frontend
|
|
||||||
```
|
|
||||||
|
|
||||||
### Execute Commands in Running Container
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Open shell
|
|
||||||
docker exec -it sexy-pivoine-frontend sh
|
|
||||||
|
|
||||||
# Check Node.js version
|
|
||||||
docker exec sexy-pivoine-frontend node --version
|
|
||||||
|
|
||||||
# Check environment variables
|
|
||||||
docker exec sexy-pivoine-frontend env
|
|
||||||
```
|
|
||||||
|
|
||||||
### Debug Build Issues
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Build with no cache
|
|
||||||
docker build --no-cache -t sexy.pivoine.art:latest .
|
|
||||||
|
|
||||||
# Build specific stage for debugging
|
|
||||||
docker build --target builder -t sexy.pivoine.art:builder .
|
|
||||||
|
|
||||||
# Inspect builder stage
|
|
||||||
docker run -it --rm sexy.pivoine.art:builder sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## Production Best Practices
|
|
||||||
|
|
||||||
### 1. Use Specific Tags
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Tag with version
|
|
||||||
docker build -t sexy.pivoine.art:1.0.0 .
|
|
||||||
docker tag sexy.pivoine.art:1.0.0 sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Image Scanning
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Scan for vulnerabilities (requires Docker Scout or Trivy)
|
|
||||||
docker scout cves sexy.pivoine.art:latest
|
|
||||||
|
|
||||||
# Or with Trivy
|
|
||||||
trivy image sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Resource Limits
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine-frontend \
|
|
||||||
-p 3000:3000 \
|
|
||||||
--memory="2g" \
|
|
||||||
--cpus="2" \
|
|
||||||
--env-file .env.production \
|
|
||||||
sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Restart Policies
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine-frontend \
|
|
||||||
--restart=unless-stopped \
|
|
||||||
-p 3000:3000 \
|
|
||||||
--env-file .env.production \
|
|
||||||
sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### 5. Use Docker Secrets (Docker Swarm)
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Create secrets
|
|
||||||
echo "your-api-key" | docker secret create letterspace_api_key -
|
|
||||||
|
|
||||||
# Deploy with secrets
|
|
||||||
docker service create \
|
|
||||||
--name sexy-pivoine-frontend \
|
|
||||||
--secret letterspace_api_key \
|
|
||||||
-p 3000:3000 \
|
|
||||||
sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Optimization Tips
|
|
||||||
|
|
||||||
### Reduce Build Time
|
|
||||||
|
|
||||||
1. **Use BuildKit cache mounts** (already enabled in Dockerfile)
|
|
||||||
2. **Leverage layer caching** - structure Dockerfile to cache dependencies
|
|
||||||
3. **Use `.dockerignore`** - exclude unnecessary files from build context
|
|
||||||
|
|
||||||
### Reduce Image Size
|
|
||||||
|
|
||||||
Current optimizations:
|
|
||||||
- Multi-stage build (builder artifacts not in final image)
|
|
||||||
- Production-only dependencies (`pnpm install --prod`)
|
|
||||||
- Minimal base image (`node:20.19.1-slim`)
|
|
||||||
- Only necessary build artifacts copied to runner
|
|
||||||
|
|
||||||
Image size breakdown:
|
|
||||||
```bash
|
|
||||||
docker images sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## CI/CD Integration
|
|
||||||
|
|
||||||
### GitHub Actions (Automated)
|
|
||||||
|
|
||||||
This repository includes automated GitHub Actions workflows for building, scanning, and managing Docker images.
|
|
||||||
|
|
||||||
**Pre-configured workflows:**
|
|
||||||
- **Build & Push** (`.github/workflows/docker-build-push.yml`)
|
|
||||||
- Automatically builds and pushes to `ghcr.io/valknarxxx/sexy`
|
|
||||||
- Triggers on push to main/develop, version tags, and PRs
|
|
||||||
- Multi-platform builds (AMD64 + ARM64)
|
|
||||||
- Smart tagging: latest, branch names, semver, commit SHAs
|
|
||||||
|
|
||||||
- **Security Scan** (`.github/workflows/docker-scan.yml`)
|
|
||||||
- Daily vulnerability scans with Trivy
|
|
||||||
- Reports to GitHub Security tab
|
|
||||||
- Scans on every release
|
|
||||||
|
|
||||||
- **Cleanup** (`.github/workflows/cleanup-images.yml`)
|
|
||||||
- Weekly cleanup of old untagged images
|
|
||||||
- Keeps last 10 versions
|
|
||||||
|
|
||||||
**Using pre-built images:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Pull latest from GitHub Container Registry
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
|
||||||
|
|
||||||
# Pull specific version
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:v1.0.0
|
|
||||||
|
|
||||||
# Run the image
|
|
||||||
docker run -d -p 3000:3000 --env-file .env.production ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
**Triggering builds:**
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Push to main → builds 'latest' tag
|
|
||||||
git push origin main
|
|
||||||
|
|
||||||
# Create version tag → builds semver tags
|
|
||||||
git tag v1.0.0 && git push origin v1.0.0
|
|
||||||
|
|
||||||
# Pull request → builds but doesn't push
|
|
||||||
```
|
|
||||||
|
|
||||||
See `.github/workflows/README.md` for detailed workflow documentation.
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Build Fails at Rust Installation
|
|
||||||
|
|
||||||
**Problem**: Rust installation fails or times out
|
|
||||||
|
|
||||||
**Solution**:
|
|
||||||
- Check internet connectivity
|
|
||||||
- Use a Rust mirror if in restricted network
|
|
||||||
- Increase build timeout
|
|
||||||
|
|
||||||
### WASM Build Fails
|
|
||||||
|
|
||||||
**Problem**: `wasm-bindgen-cli` version mismatch
|
|
||||||
|
|
||||||
**Solution**:
|
|
||||||
```dockerfile
|
|
||||||
# In Dockerfile, pin wasm-bindgen-cli version
|
|
||||||
RUN cargo install wasm-bindgen-cli --version 0.2.103
|
|
||||||
```
|
|
||||||
|
|
||||||
### Container Exits Immediately
|
|
||||||
|
|
||||||
**Problem**: Container starts then exits
|
|
||||||
|
|
||||||
**Solution**: Check logs and verify:
|
|
||||||
```bash
|
|
||||||
docker logs sexy-pivoine-frontend
|
|
||||||
|
|
||||||
# Verify build output exists
|
|
||||||
docker run -it --rm sexy.pivoine.art:latest ls -la packages/frontend/build
|
|
||||||
```
|
|
||||||
|
|
||||||
### Port Already in Use
|
|
||||||
|
|
||||||
**Problem**: Port 3000 already bound
|
|
||||||
|
|
||||||
**Solution**:
|
|
||||||
```bash
|
|
||||||
# Use different host port
|
|
||||||
docker run -d -p 8080:3000 sexy.pivoine.art:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Maintenance
|
|
||||||
|
|
||||||
### Clean Up
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Remove stopped containers
|
|
||||||
docker container prune
|
|
||||||
|
|
||||||
# Remove unused images
|
|
||||||
docker image prune -a
|
|
||||||
|
|
||||||
# Remove build cache
|
|
||||||
docker builder prune
|
|
||||||
|
|
||||||
# Complete cleanup (use with caution)
|
|
||||||
docker system prune -a --volumes
|
|
||||||
```
|
|
||||||
|
|
||||||
### Update Base Image
|
|
||||||
|
|
||||||
Regularly update the base Node.js image:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Pull latest Node 20 LTS
|
|
||||||
docker pull node:20.19.1-slim
|
|
||||||
|
|
||||||
# Rebuild
|
|
||||||
docker build --pull -t sexy.pivoine.art:latest .
|
|
||||||
```
|
|
||||||
24
Dockerfile
24
Dockerfile
@@ -14,9 +14,10 @@ WORKDIR /app
|
|||||||
# Copy workspace configuration
|
# Copy workspace configuration
|
||||||
COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
|
COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
|
||||||
|
|
||||||
# Copy .env to .env.production for proper svelte compiling
|
# Create env file with placeholder values so SvelteKit knows variable names at build time
|
||||||
RUN mkdir -p ./packages/frontend
|
# Actual values are injected at runtime via process.env (adapter-node)
|
||||||
COPY packages/frontend/.env ./packages/frontend/.env.production
|
RUN mkdir -p ./packages/frontend && \
|
||||||
|
printf 'PUBLIC_API_URL=\nPUBLIC_URL=\nPUBLIC_UMAMI_ID=\nPUBLIC_UMAMI_SCRIPT=\n' > ./packages/frontend/.env
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Builder stage - compile application with Rust/WASM support
|
# Builder stage - compile application with Rust/WASM support
|
||||||
@@ -63,11 +64,8 @@ RUN pnpm --filter @sexy.pivoine.art/buttplug build
|
|||||||
# 3. Build frontend
|
# 3. Build frontend
|
||||||
RUN pnpm --filter @sexy.pivoine.art/frontend build
|
RUN pnpm --filter @sexy.pivoine.art/frontend build
|
||||||
|
|
||||||
# 4. Build Directus bundle
|
|
||||||
RUN pnpm --filter @sexy.pivoine.art/bundle build
|
|
||||||
|
|
||||||
# Prune dev dependencies for production
|
# Prune dev dependencies for production
|
||||||
RUN pnpm install -rP
|
RUN CI=true pnpm install -rP
|
||||||
|
|
||||||
# ============================================================================
|
# ============================================================================
|
||||||
# Runner stage - minimal production image
|
# Runner stage - minimal production image
|
||||||
@@ -77,7 +75,6 @@ FROM node:22.11.0-slim AS runner
|
|||||||
# Install dumb-init for proper signal handling
|
# Install dumb-init for proper signal handling
|
||||||
RUN apt-get update && apt-get install -y \
|
RUN apt-get update && apt-get install -y \
|
||||||
dumb-init \
|
dumb-init \
|
||||||
ffmpeg \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Create non-root user
|
# Create non-root user
|
||||||
@@ -95,18 +92,13 @@ COPY --from=builder --chown=node:node /app/pnpm-lock.yaml ./pnpm-lock.yaml
|
|||||||
COPY --from=builder --chown=node:node /app/pnpm-workspace.yaml ./pnpm-workspace.yaml
|
COPY --from=builder --chown=node:node /app/pnpm-workspace.yaml ./pnpm-workspace.yaml
|
||||||
|
|
||||||
# Create package directories
|
# Create package directories
|
||||||
RUN mkdir -p packages/frontend packages/bundle packages/buttplug
|
RUN mkdir -p packages/frontend packages/buttplug
|
||||||
|
|
||||||
# Copy frontend artifacts
|
# Copy frontend artifacts
|
||||||
COPY --from=builder --chown=node:node /app/packages/frontend/build ./packages/frontend/build
|
COPY --from=builder --chown=node:node /app/packages/frontend/build ./packages/frontend/build
|
||||||
COPY --from=builder --chown=node:node /app/packages/frontend/node_modules ./packages/frontend/node_modules
|
COPY --from=builder --chown=node:node /app/packages/frontend/node_modules ./packages/frontend/node_modules
|
||||||
COPY --from=builder --chown=node:node /app/packages/frontend/package.json ./packages/frontend/package.json
|
COPY --from=builder --chown=node:node /app/packages/frontend/package.json ./packages/frontend/package.json
|
||||||
|
|
||||||
# Copy bundle artifacts
|
|
||||||
COPY --from=builder --chown=node:node /app/packages/bundle/dist ./packages/bundle/dist
|
|
||||||
COPY --from=builder --chown=node:node /app/packages/bundle/node_modules ./packages/bundle/node_modules
|
|
||||||
COPY --from=builder --chown=node:node /app/packages/bundle/package.json ./packages/bundle/package.json
|
|
||||||
|
|
||||||
# Copy buttplug artifacts
|
# Copy buttplug artifacts
|
||||||
COPY --from=builder --chown=node:node /app/packages/buttplug/dist ./packages/buttplug/dist
|
COPY --from=builder --chown=node:node /app/packages/buttplug/dist ./packages/buttplug/dist
|
||||||
COPY --from=builder --chown=node:node /app/packages/buttplug/node_modules ./packages/buttplug/node_modules
|
COPY --from=builder --chown=node:node /app/packages/buttplug/node_modules ./packages/buttplug/node_modules
|
||||||
@@ -124,9 +116,7 @@ ENV NODE_ENV=production \
|
|||||||
ENV PUBLIC_API_URL="" \
|
ENV PUBLIC_API_URL="" \
|
||||||
PUBLIC_URL="" \
|
PUBLIC_URL="" \
|
||||||
PUBLIC_UMAMI_ID="" \
|
PUBLIC_UMAMI_ID="" \
|
||||||
LETTERSPACE_API_URL="" \
|
PUBLIC_UMAMI_SCRIPT=""
|
||||||
LETTERSPACE_API_KEY="" \
|
|
||||||
LETTERSPACE_LIST_ID=""
|
|
||||||
|
|
||||||
# Expose application port
|
# Expose application port
|
||||||
EXPOSE 3000
|
EXPOSE 3000
|
||||||
|
|||||||
73
Dockerfile.backend
Normal file
73
Dockerfile.backend
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
# syntax=docker/dockerfile:1
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Builder stage
|
||||||
|
# ============================================================================
|
||||||
|
FROM node:22.11.0-slim AS builder
|
||||||
|
|
||||||
|
RUN npm install -g corepack@latest && corepack enable
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Copy all package manifests so pnpm can resolve the workspace lockfile,
|
||||||
|
# but use --ignore-scripts to skip buttplug's Rust/WASM build entirely.
|
||||||
|
COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./
|
||||||
|
COPY packages/backend/package.json ./packages/backend/package.json
|
||||||
|
COPY packages/frontend/package.json ./packages/frontend/package.json
|
||||||
|
COPY packages/buttplug/package.json ./packages/buttplug/package.json
|
||||||
|
COPY packages/types/package.json ./packages/types/package.json
|
||||||
|
|
||||||
|
RUN pnpm install --frozen-lockfile --filter @sexy.pivoine.art/backend --ignore-scripts
|
||||||
|
|
||||||
|
# Rebuild native bindings (argon2, sharp)
|
||||||
|
RUN pnpm rebuild argon2 sharp
|
||||||
|
|
||||||
|
COPY packages/types ./packages/types
|
||||||
|
COPY packages/backend ./packages/backend
|
||||||
|
|
||||||
|
RUN pnpm --filter @sexy.pivoine.art/backend build
|
||||||
|
|
||||||
|
RUN CI=true pnpm install --frozen-lockfile --filter @sexy.pivoine.art/backend --prod --ignore-scripts
|
||||||
|
|
||||||
|
RUN pnpm rebuild argon2 sharp
|
||||||
|
|
||||||
|
# ============================================================================
|
||||||
|
# Runner stage
|
||||||
|
# ============================================================================
|
||||||
|
FROM node:22.11.0-slim AS runner
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install -y \
|
||||||
|
dumb-init \
|
||||||
|
ffmpeg \
|
||||||
|
wget \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN userdel -r node && \
|
||||||
|
groupadd -r -g 1000 node && \
|
||||||
|
useradd -r -u 1000 -g node -m -d /home/node -s /bin/bash node
|
||||||
|
|
||||||
|
WORKDIR /home/node/app
|
||||||
|
|
||||||
|
RUN mkdir -p packages/backend
|
||||||
|
|
||||||
|
COPY --from=builder --chown=node:node /app/node_modules ./node_modules
|
||||||
|
COPY --from=builder --chown=node:node /app/package.json ./package.json
|
||||||
|
COPY --from=builder --chown=node:node /app/packages/backend/dist ./packages/backend/dist
|
||||||
|
COPY --from=builder --chown=node:node /app/packages/backend/node_modules ./packages/backend/node_modules
|
||||||
|
COPY --from=builder --chown=node:node /app/packages/backend/package.json ./packages/backend/package.json
|
||||||
|
COPY --from=builder --chown=node:node /app/packages/backend/src/migrations ./packages/backend/migrations
|
||||||
|
|
||||||
|
RUN mkdir -p /data/uploads && chown node:node /data/uploads
|
||||||
|
|
||||||
|
USER node
|
||||||
|
|
||||||
|
ENV NODE_ENV=production \
|
||||||
|
PORT=4000
|
||||||
|
|
||||||
|
EXPOSE 4000
|
||||||
|
|
||||||
|
HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \
|
||||||
|
CMD wget --no-verbose --tries=1 --spider http://localhost:4000/health
|
||||||
|
|
||||||
|
ENTRYPOINT ["dumb-init", "--"]
|
||||||
|
CMD ["node", "packages/backend/dist/index.js"]
|
||||||
334
QUICKSTART.md
334
QUICKSTART.md
@@ -1,334 +0,0 @@
|
|||||||
# Quick Start Guide
|
|
||||||
|
|
||||||
Get sexy.pivoine.art running in under 5 minutes using pre-built Docker images.
|
|
||||||
|
|
||||||
## Prerequisites
|
|
||||||
|
|
||||||
- Docker 20.10+
|
|
||||||
- Docker Compose 2.0+ (optional)
|
|
||||||
|
|
||||||
## Option 1: Docker Run (Fastest)
|
|
||||||
|
|
||||||
### Step 1: Pull the Image
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 2: Create Environment File
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cat > .env.production << EOF
|
|
||||||
PUBLIC_API_URL=https://api.your-domain.com
|
|
||||||
PUBLIC_URL=https://your-domain.com
|
|
||||||
PUBLIC_UMAMI_ID=
|
|
||||||
LETTERSPACE_API_URL=
|
|
||||||
LETTERSPACE_API_KEY=
|
|
||||||
LETTERSPACE_LIST_ID=
|
|
||||||
EOF
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 3: Run the Container
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine \
|
|
||||||
-p 3000:3000 \
|
|
||||||
--env-file .env.production \
|
|
||||||
--restart unless-stopped \
|
|
||||||
ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 4: Verify
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check if running
|
|
||||||
docker ps | grep sexy-pivoine
|
|
||||||
|
|
||||||
# Check logs
|
|
||||||
docker logs -f sexy-pivoine
|
|
||||||
|
|
||||||
# Test the application
|
|
||||||
curl http://localhost:3000
|
|
||||||
```
|
|
||||||
|
|
||||||
Your application is now running at `http://localhost:3000` 🎉
|
|
||||||
|
|
||||||
## Option 2: Docker Compose (Recommended)
|
|
||||||
|
|
||||||
### Step 1: Download docker-compose.production.yml
|
|
||||||
|
|
||||||
```bash
|
|
||||||
curl -O https://raw.githubusercontent.com/valknarxxx/sexy/main/docker-compose.production.yml
|
|
||||||
```
|
|
||||||
|
|
||||||
Or if you have the repository:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /path/to/sexy.pivoine.art
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 2: Create Environment File
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cp .env.production.example .env.production
|
|
||||||
nano .env.production # Edit with your values
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 3: Start Services
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose -f docker-compose.production.yml up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 4: Monitor
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# View logs
|
|
||||||
docker-compose -f docker-compose.production.yml logs -f
|
|
||||||
|
|
||||||
# Check status
|
|
||||||
docker-compose -f docker-compose.production.yml ps
|
|
||||||
```
|
|
||||||
|
|
||||||
Your application is now running at `http://localhost:3000` 🎉
|
|
||||||
|
|
||||||
## Accessing Private Images
|
|
||||||
|
|
||||||
If the image is in a private registry:
|
|
||||||
|
|
||||||
### Step 1: Create GitHub Personal Access Token
|
|
||||||
|
|
||||||
1. Go to https://github.com/settings/tokens
|
|
||||||
2. Click "Generate new token (classic)"
|
|
||||||
3. Select scope: `read:packages`
|
|
||||||
4. Generate and copy the token
|
|
||||||
|
|
||||||
### Step 2: Login to GitHub Container Registry
|
|
||||||
|
|
||||||
```bash
|
|
||||||
echo YOUR_GITHUB_TOKEN | docker login ghcr.io -u YOUR_GITHUB_USERNAME --password-stdin
|
|
||||||
```
|
|
||||||
|
|
||||||
### Step 3: Pull and Run
|
|
||||||
|
|
||||||
Now you can pull private images:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Environment Variables
|
|
||||||
|
|
||||||
### Required
|
|
||||||
|
|
||||||
| Variable | Description | Example |
|
|
||||||
|----------|-------------|---------|
|
|
||||||
| `PUBLIC_API_URL` | Directus API endpoint | `https://api.pivoine.art` |
|
|
||||||
| `PUBLIC_URL` | Frontend URL | `https://sexy.pivoine.art` |
|
|
||||||
|
|
||||||
### Optional
|
|
||||||
|
|
||||||
| Variable | Description | Example |
|
|
||||||
|----------|-------------|---------|
|
|
||||||
| `PUBLIC_UMAMI_ID` | Analytics tracking ID | `abc-123-def` |
|
|
||||||
| `LETTERSPACE_API_URL` | Newsletter API | `https://api.letterspace.com/v1` |
|
|
||||||
| `LETTERSPACE_API_KEY` | Newsletter API key | `sk_live_...` |
|
|
||||||
| `LETTERSPACE_LIST_ID` | Mailing list ID | `list_abc123` |
|
|
||||||
|
|
||||||
## Common Commands
|
|
||||||
|
|
||||||
### View Logs
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Follow logs (Docker Run)
|
|
||||||
docker logs -f sexy-pivoine
|
|
||||||
|
|
||||||
# Follow logs (Docker Compose)
|
|
||||||
docker-compose -f docker-compose.production.yml logs -f
|
|
||||||
```
|
|
||||||
|
|
||||||
### Restart Container
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Docker Run
|
|
||||||
docker restart sexy-pivoine
|
|
||||||
|
|
||||||
# Docker Compose
|
|
||||||
docker-compose -f docker-compose.production.yml restart
|
|
||||||
```
|
|
||||||
|
|
||||||
### Stop Container
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Docker Run
|
|
||||||
docker stop sexy-pivoine
|
|
||||||
|
|
||||||
# Docker Compose
|
|
||||||
docker-compose -f docker-compose.production.yml down
|
|
||||||
```
|
|
||||||
|
|
||||||
### Update to Latest Version
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Docker Run
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
|
||||||
docker stop sexy-pivoine
|
|
||||||
docker rm sexy-pivoine
|
|
||||||
# Then re-run the docker run command from Step 3
|
|
||||||
|
|
||||||
# Docker Compose
|
|
||||||
docker-compose -f docker-compose.production.yml pull
|
|
||||||
docker-compose -f docker-compose.production.yml up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### Shell Access
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Docker Run
|
|
||||||
docker exec -it sexy-pivoine sh
|
|
||||||
|
|
||||||
# Docker Compose
|
|
||||||
docker-compose -f docker-compose.production.yml exec frontend sh
|
|
||||||
```
|
|
||||||
|
|
||||||
## Available Image Tags
|
|
||||||
|
|
||||||
| Tag | Description | Use Case |
|
|
||||||
|-----|-------------|----------|
|
|
||||||
| `latest` | Latest stable build from main | Production |
|
|
||||||
| `v1.0.0` | Specific version | Production (pinned) |
|
|
||||||
| `develop` | Latest from develop branch | Staging |
|
|
||||||
| `main-abc123` | Specific commit | Testing |
|
|
||||||
|
|
||||||
**Best Practice:** Use version tags in production for predictable deployments.
|
|
||||||
|
|
||||||
## Production Deployment
|
|
||||||
|
|
||||||
### 1. Use Version Tags
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Instead of :latest
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:v1.0.0
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Add Resource Limits
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker run -d \
|
|
||||||
--name sexy-pivoine \
|
|
||||||
-p 3000:3000 \
|
|
||||||
--env-file .env.production \
|
|
||||||
--memory="2g" \
|
|
||||||
--cpus="2" \
|
|
||||||
--restart unless-stopped \
|
|
||||||
ghcr.io/valknarxxx/sexy:v1.0.0
|
|
||||||
```
|
|
||||||
|
|
||||||
### 3. Use a Reverse Proxy
|
|
||||||
|
|
||||||
Example with nginx:
|
|
||||||
|
|
||||||
```nginx
|
|
||||||
server {
|
|
||||||
listen 80;
|
|
||||||
server_name sexy.pivoine.art;
|
|
||||||
|
|
||||||
location / {
|
|
||||||
proxy_pass http://localhost:3000;
|
|
||||||
proxy_http_version 1.1;
|
|
||||||
proxy_set_header Upgrade $http_upgrade;
|
|
||||||
proxy_set_header Connection 'upgrade';
|
|
||||||
proxy_set_header Host $host;
|
|
||||||
proxy_cache_bypass $http_upgrade;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
### 4. Enable HTTPS
|
|
||||||
|
|
||||||
Use Certbot or similar:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
certbot --nginx -d sexy.pivoine.art
|
|
||||||
```
|
|
||||||
|
|
||||||
## Health Check
|
|
||||||
|
|
||||||
The container includes a built-in health check:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check container health
|
|
||||||
docker inspect --format='{{.State.Health.Status}}' sexy-pivoine
|
|
||||||
```
|
|
||||||
|
|
||||||
Possible statuses:
|
|
||||||
- `starting` - Container just started
|
|
||||||
- `healthy` - Application is responding
|
|
||||||
- `unhealthy` - Application is not responding
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Container Exits Immediately
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check logs
|
|
||||||
docker logs sexy-pivoine
|
|
||||||
|
|
||||||
# Common issues:
|
|
||||||
# - Missing environment variables
|
|
||||||
# - Port 3000 already in use
|
|
||||||
# - Invalid environment variable values
|
|
||||||
```
|
|
||||||
|
|
||||||
### Cannot Pull Image
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# For private images, ensure you're logged in
|
|
||||||
docker login ghcr.io
|
|
||||||
|
|
||||||
# Check if image exists
|
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
### Port Already in Use
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Use a different port
|
|
||||||
docker run -d -p 8080:3000 ghcr.io/valknarxxx/sexy:latest
|
|
||||||
|
|
||||||
# Or find what's using port 3000
|
|
||||||
lsof -i :3000
|
|
||||||
```
|
|
||||||
|
|
||||||
### Application Not Accessible
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check if container is running
|
|
||||||
docker ps | grep sexy-pivoine
|
|
||||||
|
|
||||||
# Check logs
|
|
||||||
docker logs sexy-pivoine
|
|
||||||
|
|
||||||
# Verify port mapping
|
|
||||||
docker port sexy-pivoine
|
|
||||||
|
|
||||||
# Test from inside container
|
|
||||||
docker exec sexy-pivoine wget -O- http://localhost:3000
|
|
||||||
```
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
- **Production setup:** See [DOCKER.md](DOCKER.md)
|
|
||||||
- **Development:** See [CLAUDE.md](CLAUDE.md)
|
|
||||||
- **CI/CD:** See [.github/workflows/README.md](.github/workflows/README.md)
|
|
||||||
|
|
||||||
## Support
|
|
||||||
|
|
||||||
- **Issues:** https://github.com/valknarxxx/sexy/issues
|
|
||||||
- **Discussions:** https://github.com/valknarxxx/sexy/discussions
|
|
||||||
- **Security:** Report privately via GitHub Security tab
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
See [LICENSE](LICENSE) file for details.
|
|
||||||
326
README.md
326
README.md
@@ -4,7 +4,7 @@
|
|||||||
|
|
||||||

|

|
||||||
|
|
||||||
*"Lust und Liebe gehören zusammen - wer das eine verteufelt, zerstört auch das andere."*
|
_"Lust und Liebe gehören zusammen - wer das eine verteufelt, zerstört auch das andere."_
|
||||||
— **Beate Uhse**, Pionierin der sexuellen Befreiung ✈️
|
— **Beate Uhse**, Pionierin der sexuellen Befreiung ✈️
|
||||||
|
|
||||||
---
|
---
|
||||||
@@ -13,10 +13,10 @@
|
|||||||
|
|
||||||
Built with passion, technology, and the fearless spirit of sexual empowerment
|
Built with passion, technology, and the fearless spirit of sexual empowerment
|
||||||
|
|
||||||
[](https://github.com/valknarxxx/sexy.pivoine.art/actions/workflows/docker-build-push.yml)
|
[](https://dev.pivoine.art/valknar/sexy/actions)
|
||||||
[](https://github.com/valknarxxx/sexy.pivoine.art/actions/workflows/docker-scan.yml)
|
[](https://dev.pivoine.art/valknar/sexy/actions)
|
||||||
[](LICENSE)
|
[](LICENSE)
|
||||||
[](http://sexy.pivoine.art)
|
[](https://sexy.pivoine.art)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -24,21 +24,23 @@ Built with passion, technology, and the fearless spirit of sexual empowerment
|
|||||||
|
|
||||||
## 👅 What Is This Delicious Creation?
|
## 👅 What Is This Delicious Creation?
|
||||||
|
|
||||||
Welcome, dear pleasure-seeker! This is **sexy.pivoine.art** — a modern, sensual platform combining the elegance of **SvelteKit**, the power of **Directus CMS**, and the intimate connection of **Buttplug.io** hardware integration.
|
Welcome, dear pleasure-seeker! This is **sexy.pivoine.art** — a modern, sensual platform built from the ground up with full control over every intimate detail. A **SvelteKit** frontend caresses a purpose-built **Fastify + GraphQL** backend, while **Buttplug.io** hardware integration brings the experience into the physical world.
|
||||||
|
|
||||||
Like Beate Uhse breaking barriers in post-war Germany, we believe in the freedom to explore, create, and celebrate sexuality without shame. This platform is built for **models**, **creators**, and **connoisseurs** of adult content who deserve technology as sophisticated as their desires.
|
Like Beate Uhse breaking barriers in post-war Germany, we believe in the freedom to explore, create, and celebrate sexuality without shame. This platform is built for **models**, **creators**, and **connoisseurs** of adult content who deserve technology as sophisticated as their desires.
|
||||||
|
|
||||||
### ♉ Features That'll Make You Blush ♊
|
### ♉ Features That'll Make You Blush ♊
|
||||||
|
|
||||||
- 💖 **Sensual SvelteKit Frontend** with Tailwind CSS 4 styling
|
- 💖 **Sensual SvelteKit Frontend** with Tailwind CSS 4 styling
|
||||||
- 🗄️ **Headless CMS** powered by Directus for content liberation
|
- ⚡ **Purpose-built GraphQL Backend** — lean, fast, no CMS overhead
|
||||||
|
- 🔐 **Session-based Auth** with Redis & Argon2 — discretion guaranteed
|
||||||
|
- 🖼️ **Smart Image Transforms** via Sharp (WebP, multiple presets, cached)
|
||||||
- 🎮 **Hardware Integration** via Buttplug.io (yes, really!)
|
- 🎮 **Hardware Integration** via Buttplug.io (yes, really!)
|
||||||
- 🌐 **Multi-Platform Support** (AMD64 + ARM64) — pleasure everywhere
|
|
||||||
- 🔒 **Session-Based Authentication** — discretion guaranteed
|
|
||||||
- 📱 **Responsive Design** that looks sexy on any device
|
- 📱 **Responsive Design** that looks sexy on any device
|
||||||
- 🌍 **Internationalization** — pleasure speaks all languages
|
- 🌍 **Internationalization** — pleasure speaks all languages
|
||||||
|
- 🏆 **Gamification** — achievements, leaderboards, and reward points
|
||||||
|
- 💬 **Comments & Social** — build your community
|
||||||
- 📊 **Analytics Integration** (Umami) — know your admirers
|
- 📊 **Analytics Integration** (Umami) — know your admirers
|
||||||
- 📧 **Newsletter Integration** (Letterspace) — stay connected
|
- 🐳 **Self-hosted CI/CD** via Gitea Actions on `dev.pivoine.art`
|
||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
@@ -49,15 +51,21 @@ Like Beate Uhse breaking barriers in post-war Germany, we believe in the freedom
|
|||||||
```
|
```
|
||||||
┌─────────────────────────────────────────────────────────────┐
|
┌─────────────────────────────────────────────────────────────┐
|
||||||
│ 💋 Frontend Layer │
|
│ 💋 Frontend Layer │
|
||||||
│ ├─ SvelteKit 2.0 → Smooth as silk │
|
│ ├─ SvelteKit 2 → Smooth as silk │
|
||||||
│ ├─ Tailwind CSS 4 → Styled to seduce │
|
│ ├─ Tailwind CSS 4 → Styled to seduce │
|
||||||
│ ├─ bits-ui Components → Building blocks of pleasure │
|
│ ├─ bits-ui Components → Building blocks of pleasure │
|
||||||
|
│ ├─ graphql-request v7 → Whispering to the backend │
|
||||||
│ └─ Vite → Fast and furious │
|
│ └─ Vite → Fast and furious │
|
||||||
├─────────────────────────────────────────────────────────────┤
|
├─────────────────────────────────────────────────────────────┤
|
||||||
│ 🍷 Backend Layer │
|
│ 🍷 Backend Layer │
|
||||||
│ ├─ Directus CMS → Content with no limits │
|
│ ├─ Fastify v5 → The fastest penetration │
|
||||||
│ ├─ Custom Extensions → Bespoke pleasures │
|
│ ├─ GraphQL Yoga v5 → Flexible positions │
|
||||||
│ └─ PostgreSQL → Data deep and secure │
|
│ ├─ Pothos (code-first) → Schema with intention │
|
||||||
|
│ ├─ Drizzle ORM → Data with grace │
|
||||||
|
│ ├─ PostgreSQL 16 → Deep and persistent │
|
||||||
|
│ ├─ Redis → Sessions that never forget │
|
||||||
|
│ ├─ Sharp → Images transformed beautifully │
|
||||||
|
│ └─ Argon2 → Passwords hashed with passion │
|
||||||
├─────────────────────────────────────────────────────────────┤
|
├─────────────────────────────────────────────────────────────┤
|
||||||
│ 🎀 Hardware Layer │
|
│ 🎀 Hardware Layer │
|
||||||
│ ├─ Buttplug.io → Real connections │
|
│ ├─ Buttplug.io → Real connections │
|
||||||
@@ -66,8 +74,8 @@ Like Beate Uhse breaking barriers in post-war Germany, we believe in the freedom
|
|||||||
├─────────────────────────────────────────────────────────────┤
|
├─────────────────────────────────────────────────────────────┤
|
||||||
│ 🌸 DevOps Layer │
|
│ 🌸 DevOps Layer │
|
||||||
│ ├─ Docker → Containerized ecstasy │
|
│ ├─ Docker → Containerized ecstasy │
|
||||||
│ ├─ GitHub Actions → Automated seduction │
|
│ ├─ Gitea Actions → Self-hosted seduction │
|
||||||
│ └─ GHCR → Images served hot │
|
│ └─ dev.pivoine.art → Our own pleasure palace │
|
||||||
└─────────────────────────────────────────────────────────────┘
|
└─────────────────────────────────────────────────────────────┘
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -75,46 +83,49 @@ Like Beate Uhse breaking barriers in post-war Germany, we believe in the freedom
|
|||||||
|
|
||||||
## 🔥 Quick Start — Get Intimate Fast
|
## 🔥 Quick Start — Get Intimate Fast
|
||||||
|
|
||||||
### 💕 Option 1: Using Docker (Recommended)
|
### 💕 Option 1: Using Docker Compose (Recommended)
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Pull the pleasure
|
# Clone the repository
|
||||||
docker pull ghcr.io/valknarxxx/sexy:latest
|
git clone https://dev.pivoine.art/valknar/sexy.git
|
||||||
|
cd sexy.pivoine.art
|
||||||
|
|
||||||
# Run with passion
|
# Configure your secrets
|
||||||
docker run -d -p 3000:3000 \
|
cp .env.example .env
|
||||||
-e PUBLIC_API_URL=https://api.your-domain.com \
|
# Edit .env with your intimate details
|
||||||
-e PUBLIC_URL=https://your-domain.com \
|
|
||||||
ghcr.io/valknarxxx/sexy:latest
|
# Awaken all services (postgres, redis, backend, frontend)
|
||||||
|
docker compose up -d
|
||||||
|
|
||||||
# Visit your creation at http://localhost:3000 💋
|
# Visit your creation at http://localhost:3000 💋
|
||||||
```
|
```
|
||||||
|
|
||||||
See [QUICKSTART.md](QUICKSTART.md) for the full seduction guide.
|
|
||||||
|
|
||||||
### 💜 Option 2: Local Development
|
### 💜 Option 2: Local Development
|
||||||
|
|
||||||
**Prerequisites:**
|
**Prerequisites:**
|
||||||
|
|
||||||
1. Node.js 20.19.1 — *the foundation*
|
1. Node.js 20.19.1 — _the foundation_
|
||||||
2. `corepack enable` — *unlock the tools*
|
2. `corepack enable` — _unlock the tools_
|
||||||
3. `pnpm install` — *gather your ingredients*
|
3. `pnpm install` — _gather your ingredients_
|
||||||
4. Rust + `cargo install wasm-bindgen-cli` — *forge the connection*
|
4. PostgreSQL 16 + Redis — _the data lovers_
|
||||||
|
|
||||||
**Start your pleasure journey:**
|
**Start your pleasure journey:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Awaken all services
|
# Awaken data services
|
||||||
pnpm dev
|
pnpm dev:data
|
||||||
|
|
||||||
# Or tease them one by one
|
# Start the backend (port 4000)
|
||||||
pnpm dev:data # The foundation
|
pnpm dev:backend
|
||||||
pnpm dev:directus # The content
|
|
||||||
pnpm --filter @sexy.pivoine.art/frontend dev # The face
|
# Start the frontend (port 3000, proxied to :4000)
|
||||||
|
pnpm --filter @sexy.pivoine.art/frontend dev
|
||||||
```
|
```
|
||||||
|
|
||||||
Visit `http://localhost:3000` and let the experience begin... 💋
|
Visit `http://localhost:3000` and let the experience begin... 💋
|
||||||
|
|
||||||
|
GraphQL playground is available at `http://localhost:4000/graphql` — explore every query.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🌹 Project Structure
|
## 🌹 Project Structure
|
||||||
@@ -124,100 +135,116 @@ This monorepo contains three packages, each serving its purpose:
|
|||||||
```
|
```
|
||||||
sexy.pivoine.art/
|
sexy.pivoine.art/
|
||||||
├─ 💄 packages/frontend/ → SvelteKit app (the seduction)
|
├─ 💄 packages/frontend/ → SvelteKit app (the seduction)
|
||||||
├─ 🎭 packages/bundle/ → Directus extensions (the power)
|
├─ ⚡ packages/backend/ → Fastify + GraphQL API (the engine)
|
||||||
└─ 🎮 packages/buttplug/ → Hardware control (the connection)
|
└─ 🎮 packages/buttplug/ → Hardware control (the connection)
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
### 💄 Frontend (`packages/frontend/`)
|
||||||
|
|
||||||
## 📚 Documentation — Your Guide to Pleasure
|
SvelteKit 2 application with server-side rendering, i18n, and a clean component library.
|
||||||
|
Communicates with the backend exclusively via GraphQL using `graphql-request`.
|
||||||
|
Assets served via `/api/assets/:id?transform=<preset>` — no CDN, no Directus, just raw power.
|
||||||
|
|
||||||
<div align="center">
|
### ⚡ Backend (`packages/backend/`)
|
||||||
|
|
||||||
| Document | Purpose | Emoji |
|
Purpose-built Fastify v5 + GraphQL Yoga server. All business logic lives here:
|
||||||
|----------|---------|-------|
|
auth, file uploads, video processing, comments, gamification, and analytics.
|
||||||
| [QUICKSTART.md](QUICKSTART.md) | Get wet... I mean, get started! | 💦 |
|
Files stored as `<UPLOAD_DIR>/<uuid>/<filename>` with on-demand WebP transforms cached on disk.
|
||||||
| [COMPOSE.md](COMPOSE.md) | Docker Compose setup guide | 🐳 |
|
|
||||||
| [DOCKER.md](DOCKER.md) | Standalone Docker deployment | 🐋 |
|
|
||||||
| [CLAUDE.md](CLAUDE.md) | Architecture & development | 🤖 |
|
|
||||||
| [.github/workflows/README.md](.github/workflows/README.md) | CI/CD workflows | ⚙️ |
|
|
||||||
|
|
||||||
</div>
|
### 🎮 Buttplug (`packages/buttplug/`)
|
||||||
|
|
||||||
|
Hybrid TypeScript/Rust package for intimate hardware control via WebBluetooth.
|
||||||
|
Compiled to WebAssembly for browser-based Bluetooth device communication.
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🎨 Building — Craft Your Masterpiece
|
## 🗃️ Database Schema
|
||||||
|
|
||||||
### Build All Packages
|
Built with Drizzle ORM — clean tables, no `directus_` prefix, full control:
|
||||||
|
|
||||||
```bash
|
|
||||||
# Prepare everything
|
|
||||||
pnpm install
|
|
||||||
|
|
||||||
# Build the WASM foundation
|
|
||||||
pnpm --filter @sexy.pivoine.art/buttplug build:wasm
|
|
||||||
|
|
||||||
# Build the packages
|
|
||||||
pnpm --filter @sexy.pivoine.art/buttplug build
|
|
||||||
pnpm --filter @sexy.pivoine.art/frontend build
|
|
||||||
pnpm --filter @sexy.pivoine.art/bundle build
|
|
||||||
```
|
```
|
||||||
|
users → profiles, roles (model/viewer/admin), auth tokens
|
||||||
### Build Docker Image
|
files → uploaded assets with metadata and duration
|
||||||
|
videos → content with model junctions, likes, plays
|
||||||
```bash
|
articles → magazine / editorial content
|
||||||
# Quick build
|
recordings → user-created content with play tracking
|
||||||
./build.sh
|
comments → threaded by collection + item_id
|
||||||
|
achievements → gamification goals
|
||||||
# Manual control
|
user_points → points ledger
|
||||||
docker build -t sexy.pivoine.art:latest .
|
user_stats → cached leaderboard data
|
||||||
|
|
||||||
# Multi-platform pleasure
|
|
||||||
docker buildx build --platform linux/amd64,linux/arm64 -t sexy.pivoine.art:latest .
|
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🚀 Deployment — Share Your Creation
|
## 🔐 Authentication Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
POST /graphql (login mutation)
|
||||||
|
→ verify argon2 password hash
|
||||||
|
→ nanoid(32) session token
|
||||||
|
→ SET session:<token> <user JSON> EX 86400 in Redis
|
||||||
|
→ set httpOnly cookie: session_token
|
||||||
|
→ return CurrentUser
|
||||||
|
|
||||||
|
Every request:
|
||||||
|
→ read session_token cookie
|
||||||
|
→ GET session:<token> from Redis
|
||||||
|
→ inject currentUser into GraphQL context
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🖼️ Image Transforms
|
||||||
|
|
||||||
|
Assets are transformed on first request and cached as WebP:
|
||||||
|
|
||||||
|
| Preset | Size | Fit | Use |
|
||||||
|
| ----------- | ----------- | ------ | ---------------- |
|
||||||
|
| `mini` | 80×80 | cover | Avatars in lists |
|
||||||
|
| `thumbnail` | 300×300 | cover | Profile photos |
|
||||||
|
| `preview` | 800px wide | inside | Video teasers |
|
||||||
|
| `medium` | 1400px wide | inside | Full-size images |
|
||||||
|
| `banner` | 1600×480 | cover | Profile banners |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 🚀 Deployment
|
||||||
|
|
||||||
### Production with Docker Compose
|
### Production with Docker Compose
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Configure your secrets
|
# Configure your secrets
|
||||||
cp .env.production.example .env.production
|
cp .env.example .env.production
|
||||||
# Edit .env.production with your intimate details
|
# Edit .env.production — set DB credentials, SMTP, cookie secret, CORS origin
|
||||||
|
|
||||||
# Deploy with grace (uses Traefik for routing)
|
# Deploy
|
||||||
docker-compose -f compose.production.yml --env-file .env.production up -d
|
docker compose --env-file .env.production up -d
|
||||||
```
|
```
|
||||||
|
|
||||||
### Production without Docker
|
Key environment variables for the backend:
|
||||||
|
|
||||||
```bash
|
```env
|
||||||
# Build everything
|
DATABASE_URL=postgresql://sexy:sexy@postgres:5432/sexy
|
||||||
pnpm build:frontend
|
REDIS_URL=redis://redis:6379
|
||||||
|
COOKIE_SECRET=your-very-secret-key
|
||||||
# Start serving
|
CORS_ORIGIN=https://sexy.pivoine.art
|
||||||
pnpm --filter @sexy.pivoine.art/frontend start
|
UPLOAD_DIR=/data/uploads
|
||||||
|
SMTP_HOST=your.smtp.host
|
||||||
|
SMTP_PORT=587
|
||||||
|
EMAIL_FROM=noreply@sexy.pivoine.art
|
||||||
|
PUBLIC_URL=https://sexy.pivoine.art
|
||||||
```
|
```
|
||||||
|
|
||||||
---
|
### 🎬 CI/CD — Self-Hosted Seduction
|
||||||
|
|
||||||
## 🌈 Environment Variables
|
Automated builds run on **[dev.pivoine.art](https://dev.pivoine.art/valknar/sexy)** via Gitea Actions:
|
||||||
|
|
||||||
### 💖 Required (The Essentials)
|
- ✅ Frontend image → `dev.pivoine.art/valknar/sexy:latest`
|
||||||
|
- ✅ Backend image → `dev.pivoine.art/valknar/sexy-backend:latest`
|
||||||
|
- ✅ Triggers on push to `main`, `develop`, or version tags (`v*.*.*`)
|
||||||
|
- ✅ Build cache via registry for fast successive builds
|
||||||
|
|
||||||
- `PUBLIC_API_URL` — Your Directus backend
|
Images are pulled on the production server via Watchtower or manual `docker compose pull && docker compose up -d`.
|
||||||
- `PUBLIC_URL` — Your frontend domain
|
|
||||||
|
|
||||||
### 💜 Optional (The Extras)
|
|
||||||
|
|
||||||
- `PUBLIC_UMAMI_ID` — Analytics tracking
|
|
||||||
- `LETTERSPACE_API_URL` — Newsletter endpoint
|
|
||||||
- `LETTERSPACE_API_KEY` — Newsletter key
|
|
||||||
- `LETTERSPACE_LIST_ID` — Mailing list
|
|
||||||
|
|
||||||
See [.env.production.example](.env.production.example) for the full configuration.
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -228,60 +255,54 @@ graph LR
|
|||||||
A[💡 Idea] --> B[💻 Code]
|
A[💡 Idea] --> B[💻 Code]
|
||||||
B --> C[🧪 Test Locally]
|
B --> C[🧪 Test Locally]
|
||||||
C --> D[🌿 Feature Branch]
|
C --> D[🌿 Feature Branch]
|
||||||
D --> E[📤 Push & PR]
|
D --> E[📤 Push to dev.pivoine.art]
|
||||||
E --> F{✅ CI Pass?}
|
E --> F{✅ Build Pass?}
|
||||||
F -->|Yes| G[🔀 Merge to Main]
|
F -->|Yes| G[🔀 Merge to Main]
|
||||||
F -->|No| B
|
F -->|No| B
|
||||||
G --> H[🚀 Auto Deploy]
|
G --> H[🚀 Images Built & Pushed]
|
||||||
H --> I[🏷️ Tag Release]
|
H --> I[🎉 Deploy to Production]
|
||||||
I --> J[🎉 Celebrate]
|
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Create → `git checkout -b feature/my-sexy-feature`
|
1. Create → `git checkout -b feature/my-sexy-feature`
|
||||||
2. Develop → Write beautiful code
|
2. Develop → Write beautiful code
|
||||||
3. Test → `pnpm dev`
|
3. Test → `pnpm dev:data && pnpm dev:backend && pnpm dev`
|
||||||
4. Push → Create PR (triggers CI build)
|
4. Push → `git push` to `dev.pivoine.art` (triggers CI build)
|
||||||
5. Merge → Automatic deployment to production
|
5. Merge → Images published, deploy to production
|
||||||
6. Release → `git tag v1.0.0 && git push origin v1.0.0`
|
6. Release → `git tag v1.0.0 && git push origin v1.0.0`
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
## 🔐 Security — Protected Pleasure
|
## 🌈 Environment Variables
|
||||||
|
|
||||||
- 🛡️ Daily vulnerability scans with Trivy
|
### Backend (required)
|
||||||
- 🔒 Non-root Docker containers
|
|
||||||
- 📊 Security reports in GitHub Security tab
|
|
||||||
- 🤐 Confidential issue reporting available
|
|
||||||
|
|
||||||
*Report security concerns privately via GitHub Security.*
|
| Variable | Description |
|
||||||
|
| --------------- | ----------------------------- |
|
||||||
|
| `DATABASE_URL` | PostgreSQL connection string |
|
||||||
|
| `REDIS_URL` | Redis connection string |
|
||||||
|
| `COOKIE_SECRET` | Session cookie signing secret |
|
||||||
|
| `CORS_ORIGIN` | Allowed frontend origin |
|
||||||
|
| `UPLOAD_DIR` | Path for uploaded files |
|
||||||
|
|
||||||
---
|
### Backend (optional)
|
||||||
|
|
||||||
## 💝 Contributing — Join the Movement
|
| Variable | Default | Description |
|
||||||
|
| ------------ | ------- | ------------------------------ |
|
||||||
|
| `PORT` | `4000` | Backend listen port |
|
||||||
|
| `LOG_LEVEL` | `info` | Fastify log level |
|
||||||
|
| `SMTP_HOST` | — | Email server for auth flows |
|
||||||
|
| `SMTP_PORT` | `587` | Email server port |
|
||||||
|
| `EMAIL_FROM` | — | Sender address |
|
||||||
|
| `PUBLIC_URL` | — | Frontend URL (for email links) |
|
||||||
|
|
||||||
Like Beate Uhse fought for sexual liberation, we welcome contributors who believe in freedom, pleasure, and quality code.
|
### Frontend
|
||||||
|
|
||||||
1. **Fork** this repository
|
| Variable | Description |
|
||||||
2. **Create** your feature branch
|
| --------------------- | --------------------------------------------- |
|
||||||
3. **Commit** your changes
|
| `PUBLIC_API_URL` | Backend URL (e.g. `http://sexy_backend:4000`) |
|
||||||
4. **Push** to your branch
|
| `PUBLIC_URL` | Frontend public URL |
|
||||||
5. **Submit** a pull request
|
| `PUBLIC_UMAMI_ID` | Umami analytics site ID (optional) |
|
||||||
|
| `PUBLIC_UMAMI_SCRIPT` | Umami script URL (optional) |
|
||||||
All contributors are bound by our code of conduct: **Respect, Consent, and Quality.**
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎯 CI/CD Pipeline — Automated Seduction
|
|
||||||
|
|
||||||
Our GitHub Actions workflows handle:
|
|
||||||
|
|
||||||
- ✅ Multi-platform Docker builds (AMD64 + ARM64)
|
|
||||||
- ✅ Automated publishing to GHCR
|
|
||||||
- ✅ Daily security vulnerability scans
|
|
||||||
- ✅ Weekly cleanup of old images
|
|
||||||
- ✅ Semantic versioning from git tags
|
|
||||||
|
|
||||||
**Images available at:** `ghcr.io/valknarxxx/sexy`
|
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -291,20 +312,25 @@ Our GitHub Actions workflows handle:
|
|||||||
|
|
||||||
### 🌸 Created with Love by 🌸
|
### 🌸 Created with Love by 🌸
|
||||||
|
|
||||||
**[Palina](http://sexy.pivoine.art) & [Valknar](http://sexy.pivoine.art)**
|
**[Palina](https://sexy.pivoine.art) & [Valknar](https://sexy.pivoine.art)**
|
||||||
|
|
||||||
*Für die Mäuse...* 🐭💕
|
_Für die Mäuse..._ 🐭💕
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
### 🙏 Built With
|
### 🙏 Built With
|
||||||
|
|
||||||
| Technology | Purpose |
|
| Technology | Purpose |
|
||||||
|------------|---------|
|
| --------------------------------------------------------- | -------------------- |
|
||||||
| [SvelteKit](https://kit.svelte.dev/) | Framework |
|
| [SvelteKit](https://kit.svelte.dev/) | Frontend framework |
|
||||||
| [Directus](https://directus.io/) | CMS |
|
| [Fastify](https://fastify.dev/) | HTTP server |
|
||||||
| [Buttplug.io](https://buttplug.io/) | Hardware |
|
| [GraphQL Yoga](https://the-guild.dev/graphql/yoga-server) | GraphQL server |
|
||||||
| [bits-ui](https://www.bits-ui.com/) | Components |
|
| [Pothos](https://pothos-graphql.dev/) | Code-first schema |
|
||||||
|
| [Drizzle ORM](https://orm.drizzle.team/) | Database |
|
||||||
|
| [Sharp](https://sharp.pixelplumbing.com/) | Image transforms |
|
||||||
|
| [Buttplug.io](https://buttplug.io/) | Hardware |
|
||||||
|
| [bits-ui](https://www.bits-ui.com/) | UI components |
|
||||||
|
| [Gitea](https://dev.pivoine.art) | Self-hosted VCS & CI |
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -313,7 +339,7 @@ Our GitHub Actions workflows handle:
|
|||||||
Pioneer of sexual liberation (1919-2001)
|
Pioneer of sexual liberation (1919-2001)
|
||||||
Pilot, Entrepreneur, Freedom Fighter
|
Pilot, Entrepreneur, Freedom Fighter
|
||||||
|
|
||||||
*"Eine Frau, die ihre Sexualität selbstbestimmt lebt, ist eine freie Frau."*
|
_"Eine Frau, die ihre Sexualität selbstbestimmt lebt, ist eine freie Frau."_
|
||||||
|
|
||||||

|

|
||||||
|
|
||||||
@@ -334,9 +360,9 @@ Pilot, Entrepreneur, Freedom Fighter
|
|||||||
|
|
||||||
<div align="center">
|
<div align="center">
|
||||||
|
|
||||||
[](https://github.com/valknarxxx/sexy.pivoine.art/issues)
|
[](https://dev.pivoine.art/valknar/sexy)
|
||||||
[](https://github.com/valknarxxx/sexy.pivoine.art/discussions)
|
[](https://dev.pivoine.art/valknar/sexy/issues)
|
||||||
[](http://sexy.pivoine.art)
|
[](https://sexy.pivoine.art)
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
@@ -355,8 +381,8 @@ Pilot, Entrepreneur, Freedom Fighter
|
|||||||
╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝
|
╚═════╝ ╚══════╝╚═╝ ╚═╝ ╚═╝
|
||||||
</pre>
|
</pre>
|
||||||
|
|
||||||
*Pleasure is a human right. Technology is freedom. Together, they are power.*
|
_Pleasure is a human right. Technology is freedom. Together, they are power._
|
||||||
|
|
||||||
**[sexy.pivoine.art](http://sexy.pivoine.art)** | © 2025 Palina & Valknar
|
**[sexy.pivoine.art](https://sexy.pivoine.art)** | © 2025 Palina & Valknar
|
||||||
|
|
||||||
</div>
|
</div>
|
||||||
|
|||||||
265
REBUILD_GUIDE.md
265
REBUILD_GUIDE.md
@@ -1,265 +0,0 @@
|
|||||||
# 🔄 Rebuild Guide - When You Need to Rebuild the Image
|
|
||||||
|
|
||||||
## Why Rebuild?
|
|
||||||
|
|
||||||
SvelteKit's `PUBLIC_*` environment variables are **baked into the JavaScript** at build time. You need to rebuild when:
|
|
||||||
|
|
||||||
1. ✅ Changing `PUBLIC_API_URL`
|
|
||||||
2. ✅ Changing `PUBLIC_URL`
|
|
||||||
3. ✅ Changing `PUBLIC_UMAMI_ID`
|
|
||||||
4. ✅ Changing any `LETTERSPACE_*` variables
|
|
||||||
5. ❌ NOT needed for Directus env vars (those are runtime)
|
|
||||||
|
|
||||||
## Quick Rebuild Process
|
|
||||||
|
|
||||||
### 1. Update Frontend Environment Variables
|
|
||||||
|
|
||||||
Edit the frontend `.env` file:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
nano packages/frontend/.env
|
|
||||||
```
|
|
||||||
|
|
||||||
Set your production values:
|
|
||||||
```bash
|
|
||||||
PUBLIC_API_URL=https://sexy.pivoine.art/api
|
|
||||||
PUBLIC_URL=https://sexy.pivoine.art
|
|
||||||
PUBLIC_UMAMI_ID=your-umami-id
|
|
||||||
LETTERSPACE_API_URL=https://api.letterspace.com/v1
|
|
||||||
LETTERSPACE_API_KEY=your-key
|
|
||||||
LETTERSPACE_LIST_ID=your-list-id
|
|
||||||
```
|
|
||||||
|
|
||||||
### 2. Rebuild the Image
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# From the project root
|
|
||||||
docker build -t ghcr.io/valknarxxx/sexy:latest -t sexy.pivoine.art:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
**Expected Time:** 30-45 minutes (first build), 10-15 minutes (cached rebuild)
|
|
||||||
|
|
||||||
### 3. Restart Services
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# If using docker-compose
|
|
||||||
cd /home/valknar/Projects/docker-compose/sexy
|
|
||||||
docker compose down
|
|
||||||
docker compose up -d
|
|
||||||
|
|
||||||
# Or directly
|
|
||||||
docker stop sexy_frontend
|
|
||||||
docker rm sexy_frontend
|
|
||||||
docker compose up -d frontend
|
|
||||||
```
|
|
||||||
|
|
||||||
## Monitoring the Build
|
|
||||||
|
|
||||||
### Check Build Progress
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Watch build output
|
|
||||||
docker build -t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
|
|
||||||
# Build stages:
|
|
||||||
# 1. Base (~30s) - Node.js setup
|
|
||||||
# 2. Builder (~25-40min) - Rust + WASM + packages
|
|
||||||
# - Rust installation: ~2-3 min
|
|
||||||
# - wasm-bindgen-cli: ~10-15 min
|
|
||||||
# - WASM build: ~5-10 min
|
|
||||||
# - Package builds: ~5-10 min
|
|
||||||
# 3. Runner (~2min) - Final image assembly
|
|
||||||
```
|
|
||||||
|
|
||||||
### Verify Environment Variables in Built Image
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check what PUBLIC_API_URL is baked in
|
|
||||||
docker run --rm ghcr.io/valknarxxx/sexy:latest sh -c \
|
|
||||||
"grep -r 'PUBLIC_API_URL' /home/node/app/packages/frontend/build/ | head -3"
|
|
||||||
|
|
||||||
# Should show: https://sexy.pivoine.art/api
|
|
||||||
```
|
|
||||||
|
|
||||||
## Push to GitHub Container Registry
|
|
||||||
|
|
||||||
After successful build:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Login to GHCR (first time only)
|
|
||||||
echo $GITHUB_TOKEN | docker login ghcr.io -u valknarxxx --password-stdin
|
|
||||||
|
|
||||||
# Push the image
|
|
||||||
docker push ghcr.io/valknarxxx/sexy:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
## Alternative: Build Arguments (Future Enhancement)
|
|
||||||
|
|
||||||
To avoid rebuilding for every env change, consider adding build arguments:
|
|
||||||
|
|
||||||
```dockerfile
|
|
||||||
# In Dockerfile, before building frontend:
|
|
||||||
ARG PUBLIC_API_URL=https://sexy.pivoine.art/api
|
|
||||||
ARG PUBLIC_URL=https://sexy.pivoine.art
|
|
||||||
ARG PUBLIC_UMAMI_ID=
|
|
||||||
|
|
||||||
# Create .env.production dynamically
|
|
||||||
RUN echo "PUBLIC_API_URL=${PUBLIC_API_URL}" > packages/frontend/.env.production && \
|
|
||||||
echo "PUBLIC_URL=${PUBLIC_URL}" >> packages/frontend/.env.production && \
|
|
||||||
echo "PUBLIC_UMAMI_ID=${PUBLIC_UMAMI_ID}" >> packages/frontend/.env.production
|
|
||||||
```
|
|
||||||
|
|
||||||
Then build with:
|
|
||||||
```bash
|
|
||||||
docker build \
|
|
||||||
--build-arg PUBLIC_API_URL=https://sexy.pivoine.art/api \
|
|
||||||
--build-arg PUBLIC_URL=https://sexy.pivoine.art \
|
|
||||||
-t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
### Build Fails at Rust Installation
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check network connectivity
|
|
||||||
ping -c 3 sh.rustup.rs
|
|
||||||
|
|
||||||
# Build with verbose output
|
|
||||||
docker build --progress=plain -t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Build Fails at WASM
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Check if wasm-bindgen-cli matches package.json version
|
|
||||||
docker run --rm rust:latest cargo install wasm-bindgen-cli --version 0.2.103
|
|
||||||
```
|
|
||||||
|
|
||||||
### Frontend Still Shows Wrong URL
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Verify .env file is correct
|
|
||||||
cat packages/frontend/.env
|
|
||||||
|
|
||||||
# Check if old image is cached
|
|
||||||
docker images | grep sexy
|
|
||||||
docker rmi ghcr.io/valknarxxx/sexy:old-tag
|
|
||||||
|
|
||||||
# Force rebuild without cache
|
|
||||||
docker build --no-cache -t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Container Starts But Can't Connect to API
|
|
||||||
|
|
||||||
1. Check Traefik routing:
|
|
||||||
```bash
|
|
||||||
docker logs traefik | grep sexy
|
|
||||||
```
|
|
||||||
|
|
||||||
2. Check if Directus is accessible:
|
|
||||||
```bash
|
|
||||||
curl -I https://sexy.pivoine.art/api/server/health
|
|
||||||
```
|
|
||||||
|
|
||||||
3. Check frontend logs:
|
|
||||||
```bash
|
|
||||||
docker logs sexy_frontend
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development vs Production
|
|
||||||
|
|
||||||
### Development (Local)
|
|
||||||
- Use `pnpm dev` for hot reload
|
|
||||||
- No rebuild needed for code changes
|
|
||||||
- Env vars from `.env` or shell
|
|
||||||
|
|
||||||
### Production (Docker)
|
|
||||||
- Rebuild required for PUBLIC_* changes
|
|
||||||
- Changes baked into JavaScript
|
|
||||||
- Env vars from `packages/frontend/.env`
|
|
||||||
|
|
||||||
## Optimization Tips
|
|
||||||
|
|
||||||
### Speed Up Rebuilds
|
|
||||||
|
|
||||||
1. **Use BuildKit cache:**
|
|
||||||
```bash
|
|
||||||
export DOCKER_BUILDKIT=1
|
|
||||||
docker build --build-arg BUILDKIT_INLINE_CACHE=1 -t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
2. **Multi-stage caching:**
|
|
||||||
- Dockerfile already optimized with multi-stage build
|
|
||||||
- Dependencies cached separately from code
|
|
||||||
|
|
||||||
3. **Parallel builds:**
|
|
||||||
```bash
|
|
||||||
# Build with more CPU cores
|
|
||||||
docker build --cpus 4 -t ghcr.io/valknarxxx/sexy:latest .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Reduce Image Size
|
|
||||||
|
|
||||||
Current optimizations:
|
|
||||||
- ✅ Multi-stage build
|
|
||||||
- ✅ Production dependencies only
|
|
||||||
- ✅ Minimal base image
|
|
||||||
- ✅ No dev tools in final image
|
|
||||||
|
|
||||||
Expected sizes:
|
|
||||||
- Base: ~100MB
|
|
||||||
- Builder: ~2-3GB (not shipped)
|
|
||||||
- Runner: ~300-500MB (final)
|
|
||||||
|
|
||||||
## Automation
|
|
||||||
|
|
||||||
### GitHub Actions (Already Set Up)
|
|
||||||
|
|
||||||
The `.github/workflows/docker-build-push.yml` automatically:
|
|
||||||
1. Builds on push to main
|
|
||||||
2. Creates version tags
|
|
||||||
3. Pushes to GHCR
|
|
||||||
4. Caches layers for faster builds
|
|
||||||
|
|
||||||
**Trigger a rebuild:**
|
|
||||||
```bash
|
|
||||||
git tag v1.0.1
|
|
||||||
git push origin v1.0.1
|
|
||||||
```
|
|
||||||
|
|
||||||
### Local Build Script
|
|
||||||
|
|
||||||
Use the provided `build.sh`:
|
|
||||||
```bash
|
|
||||||
./build.sh -t v1.0.0 -p
|
|
||||||
```
|
|
||||||
|
|
||||||
## When NOT to Rebuild
|
|
||||||
|
|
||||||
You DON'T need to rebuild for:
|
|
||||||
- ❌ Directus configuration changes
|
|
||||||
- ❌ Database credentials
|
|
||||||
- ❌ Redis settings
|
|
||||||
- ❌ SMTP settings
|
|
||||||
- ❌ Session cookie settings
|
|
||||||
- ❌ Traefik labels
|
|
||||||
|
|
||||||
These are runtime environment variables and can be changed in docker-compose.
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
| Change | Rebuild Needed | How to Apply |
|
|
||||||
|--------|----------------|--------------|
|
|
||||||
| `PUBLIC_API_URL` | ✅ Yes | Rebuild image |
|
|
||||||
| `PUBLIC_URL` | ✅ Yes | Rebuild image |
|
|
||||||
| `PUBLIC_UMAMI_ID` | ✅ Yes | Rebuild image |
|
|
||||||
| `LETTERSPACE_*` | ✅ Yes | Rebuild image |
|
|
||||||
| `SEXY_DIRECTUS_*` | ❌ No | Restart container |
|
|
||||||
| `DB_*` | ❌ No | Restart container |
|
|
||||||
| `EMAIL_*` | ❌ No | Restart container |
|
|
||||||
| Traefik labels | ❌ No | Restart container |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Remember:** The key difference is **build-time** (compiled into JS) vs **runtime** (read from environment).
|
|
||||||
130
build.sh
130
build.sh
@@ -1,130 +0,0 @@
|
|||||||
#!/bin/bash
|
|
||||||
# Build script for sexy.pivoine.art Docker image
|
|
||||||
|
|
||||||
set -e # Exit on error
|
|
||||||
|
|
||||||
# Colors for output
|
|
||||||
RED='\033[0;31m'
|
|
||||||
GREEN='\033[0;32m'
|
|
||||||
YELLOW='\033[1;33m'
|
|
||||||
NC='\033[0m' # No Color
|
|
||||||
|
|
||||||
# Default values
|
|
||||||
IMAGE_NAME="sexy.pivoine.art"
|
|
||||||
TAG="latest"
|
|
||||||
PUSH=false
|
|
||||||
PLATFORM=""
|
|
||||||
|
|
||||||
# Parse arguments
|
|
||||||
while [[ $# -gt 0 ]]; do
|
|
||||||
case $1 in
|
|
||||||
-t|--tag)
|
|
||||||
TAG="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
-n|--name)
|
|
||||||
IMAGE_NAME="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
-p|--push)
|
|
||||||
PUSH=true
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
--platform)
|
|
||||||
PLATFORM="$2"
|
|
||||||
shift 2
|
|
||||||
;;
|
|
||||||
-h|--help)
|
|
||||||
echo "Usage: $0 [OPTIONS]"
|
|
||||||
echo ""
|
|
||||||
echo "Options:"
|
|
||||||
echo " -t, --tag TAG Set image tag (default: latest)"
|
|
||||||
echo " -n, --name NAME Set image name (default: sexy.pivoine.art)"
|
|
||||||
echo " -p, --push Push image after build"
|
|
||||||
echo " --platform PLATFORM Build for specific platform (e.g., linux/amd64,linux/arm64)"
|
|
||||||
echo " -h, --help Show this help message"
|
|
||||||
echo ""
|
|
||||||
echo "Examples:"
|
|
||||||
echo " $0 # Build with defaults"
|
|
||||||
echo " $0 -t v1.0.0 # Build with version tag"
|
|
||||||
echo " $0 --platform linux/amd64,linux/arm64 -p # Multi-platform build and push"
|
|
||||||
exit 0
|
|
||||||
;;
|
|
||||||
*)
|
|
||||||
echo -e "${RED}Unknown option: $1${NC}"
|
|
||||||
exit 1
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
done
|
|
||||||
|
|
||||||
FULL_IMAGE="${IMAGE_NAME}:${TAG}"
|
|
||||||
|
|
||||||
echo -e "${GREEN}=== Building Docker Image ===${NC}"
|
|
||||||
echo "Image: ${FULL_IMAGE}"
|
|
||||||
echo "Platform: ${PLATFORM:-default}"
|
|
||||||
echo ""
|
|
||||||
|
|
||||||
# Check if Docker is running
|
|
||||||
if ! docker info > /dev/null 2>&1; then
|
|
||||||
echo -e "${RED}Error: Docker is not running${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Build command
|
|
||||||
BUILD_CMD="docker build"
|
|
||||||
|
|
||||||
if [ -n "$PLATFORM" ]; then
|
|
||||||
# Multi-platform build requires buildx
|
|
||||||
echo -e "${YELLOW}Using buildx for multi-platform build${NC}"
|
|
||||||
BUILD_CMD="docker buildx build --platform ${PLATFORM}"
|
|
||||||
|
|
||||||
if [ "$PUSH" = true ]; then
|
|
||||||
BUILD_CMD="${BUILD_CMD} --push"
|
|
||||||
fi
|
|
||||||
else
|
|
||||||
# Regular build
|
|
||||||
if [ "$PUSH" = true ]; then
|
|
||||||
echo -e "${YELLOW}Note: --push only works with multi-platform builds. Use 'docker push' after build.${NC}"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Execute build
|
|
||||||
echo -e "${GREEN}Building...${NC}"
|
|
||||||
$BUILD_CMD -t "${FULL_IMAGE}" .
|
|
||||||
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo -e "${GREEN}✓ Build successful!${NC}"
|
|
||||||
echo "Image: ${FULL_IMAGE}"
|
|
||||||
|
|
||||||
# Show image size
|
|
||||||
if [ -z "$PLATFORM" ]; then
|
|
||||||
SIZE=$(docker images "${FULL_IMAGE}" --format "{{.Size}}")
|
|
||||||
echo "Size: ${SIZE}"
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Push if requested and not multi-platform
|
|
||||||
if [ "$PUSH" = true ] && [ -z "$PLATFORM" ]; then
|
|
||||||
echo -e "${GREEN}Pushing image...${NC}"
|
|
||||||
docker push "${FULL_IMAGE}"
|
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo -e "${GREEN}✓ Push successful!${NC}"
|
|
||||||
else
|
|
||||||
echo -e "${RED}✗ Push failed${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
|
|
||||||
echo ""
|
|
||||||
echo -e "${GREEN}Next steps:${NC}"
|
|
||||||
echo "1. Run locally:"
|
|
||||||
echo " docker run -d -p 3000:3000 --env-file .env.production ${FULL_IMAGE}"
|
|
||||||
echo ""
|
|
||||||
echo "2. Run with docker-compose:"
|
|
||||||
echo " docker-compose -f docker-compose.production.yml up -d"
|
|
||||||
echo ""
|
|
||||||
echo "3. View logs:"
|
|
||||||
echo " docker logs -f <container-name>"
|
|
||||||
else
|
|
||||||
echo -e "${RED}✗ Build failed${NC}"
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
@@ -1,130 +0,0 @@
|
|||||||
include:
|
|
||||||
- compose.yml
|
|
||||||
|
|
||||||
# Production compose file - extends base compose.yml
|
|
||||||
# Usage: docker-compose -f compose.production.yml up -d
|
|
||||||
|
|
||||||
networks:
|
|
||||||
compose_network:
|
|
||||||
external: true
|
|
||||||
name: compose_network
|
|
||||||
|
|
||||||
services:
|
|
||||||
# Disable local postgres for production (use external DB)
|
|
||||||
postgres:
|
|
||||||
deploy:
|
|
||||||
replicas: 0
|
|
||||||
|
|
||||||
# Disable local redis for production (use external Redis)
|
|
||||||
redis:
|
|
||||||
deploy:
|
|
||||||
replicas: 0
|
|
||||||
|
|
||||||
# Override Directus for production
|
|
||||||
directus:
|
|
||||||
networks:
|
|
||||||
- compose_network
|
|
||||||
ports: [] # Remove exposed ports, use Traefik instead
|
|
||||||
|
|
||||||
# Override volumes for production paths
|
|
||||||
volumes:
|
|
||||||
- ${SEXY_DIRECTUS_UPLOADS:-./uploads}:/directus/uploads
|
|
||||||
- ${SEXY_DIRECTUS_BUNDLE:-./packages/bundle/dist}:/directus/extensions/sexy.pivoine.art
|
|
||||||
|
|
||||||
# Override environment for production settings
|
|
||||||
environment:
|
|
||||||
# Database (external)
|
|
||||||
DB_HOST: ${CORE_DB_HOST}
|
|
||||||
DB_PORT: ${CORE_DB_PORT:-5432}
|
|
||||||
DB_DATABASE: ${SEXY_DB_NAME}
|
|
||||||
DB_USER: ${DB_USER}
|
|
||||||
DB_PASSWORD: ${DB_PASSWORD}
|
|
||||||
|
|
||||||
# General
|
|
||||||
SECRET: ${SEXY_DIRECTUS_SECRET}
|
|
||||||
ADMIN_EMAIL: ${ADMIN_EMAIL}
|
|
||||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD}
|
|
||||||
PUBLIC_URL: ${SEXY_PUBLIC_URL}
|
|
||||||
|
|
||||||
# Cache (external Redis)
|
|
||||||
REDIS: redis://${CORE_REDIS_HOST}:${CORE_REDIS_PORT:-6379}
|
|
||||||
|
|
||||||
# CORS
|
|
||||||
CORS_ORIGIN: ${SEXY_CORS_ORIGIN}
|
|
||||||
|
|
||||||
# Security (production settings)
|
|
||||||
SESSION_COOKIE_SECURE: ${SEXY_SESSION_COOKIE_SECURE:-true}
|
|
||||||
SESSION_COOKIE_SAME_SITE: ${SEXY_SESSION_COOKIE_SAME_SITE:-strict}
|
|
||||||
SESSION_COOKIE_DOMAIN: ${SEXY_SESSION_COOKIE_DOMAIN}
|
|
||||||
|
|
||||||
# Extensions
|
|
||||||
EXTENSIONS_AUTO_RELOAD: ${SEXY_EXTENSIONS_AUTO_RELOAD:-false}
|
|
||||||
|
|
||||||
# Email (production SMTP)
|
|
||||||
EMAIL_TRANSPORT: ${EMAIL_TRANSPORT:-smtp}
|
|
||||||
EMAIL_FROM: ${EMAIL_FROM}
|
|
||||||
EMAIL_SMTP_HOST: ${EMAIL_SMTP_HOST}
|
|
||||||
EMAIL_SMTP_PORT: ${EMAIL_SMTP_PORT:-587}
|
|
||||||
EMAIL_SMTP_USER: ${EMAIL_SMTP_USER}
|
|
||||||
EMAIL_SMTP_PASSWORD: ${EMAIL_SMTP_PASSWORD}
|
|
||||||
|
|
||||||
# User URLs
|
|
||||||
USER_REGISTER_URL_ALLOW_LIST: ${SEXY_USER_REGISTER_URL_ALLOW_LIST}
|
|
||||||
PASSWORD_RESET_URL_ALLOW_LIST: ${SEXY_PASSWORD_RESET_URL_ALLOW_LIST}
|
|
||||||
|
|
||||||
# Remove local dependencies
|
|
||||||
depends_on: []
|
|
||||||
|
|
||||||
labels:
|
|
||||||
# Traefik labels for reverse proxy
|
|
||||||
- 'traefik.enable=${SEXY_TRAEFIK_ENABLED:-true}'
|
|
||||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-redirect-web-secure.redirectscheme.scheme=https'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-redirect-web-secure'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.rule=Host(`${SEXY_TRAEFIK_HOST}`) && PathPrefix(`/api`)'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web.entrypoints=web'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.rule=Host(`${SEXY_TRAEFIK_HOST}`) && PathPrefix(`/api`)'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.tls.certresolver=resolver'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.entrypoints=web-secure'
|
|
||||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure-compress.compress=true'
|
|
||||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-strip.stripprefix.prefixes=/api'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-strip,${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure-compress'
|
|
||||||
- 'traefik.http.services.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-api-web-secure.loadbalancer.server.port=8055'
|
|
||||||
- 'traefik.docker.network=compose_network'
|
|
||||||
|
|
||||||
# Override Frontend for production
|
|
||||||
frontend:
|
|
||||||
networks:
|
|
||||||
- compose_network
|
|
||||||
ports: [] # Remove exposed ports, use Traefik instead
|
|
||||||
|
|
||||||
# Override environment for production
|
|
||||||
environment:
|
|
||||||
NODE_ENV: production
|
|
||||||
PUBLIC_API_URL: ${SEXY_FRONTEND_PUBLIC_API_URL}
|
|
||||||
PUBLIC_URL: ${SEXY_FRONTEND_PUBLIC_URL}
|
|
||||||
PUBLIC_UMAMI_ID: ${SEXY_FRONTEND_PUBLIC_UMAMI_ID:-}
|
|
||||||
LETTERSPACE_API_URL: ${SEXY_FRONTEND_LETTERSPACE_API_URL:-}
|
|
||||||
LETTERSPACE_API_KEY: ${SEXY_FRONTEND_LETTERSPACE_API_KEY:-}
|
|
||||||
LETTERSPACE_LIST_ID: ${SEXY_FRONTEND_LETTERSPACE_LIST_ID:-}
|
|
||||||
|
|
||||||
# Override volume for production path
|
|
||||||
volumes:
|
|
||||||
- ${SEXY_FRONTEND_PATH:-/var/www/sexy.pivoine.art}:/home/node/app
|
|
||||||
|
|
||||||
# Remove local dependency
|
|
||||||
depends_on: []
|
|
||||||
|
|
||||||
labels:
|
|
||||||
# Traefik labels for reverse proxy
|
|
||||||
- 'traefik.enable=${SEXY_TRAEFIK_ENABLED:-true}'
|
|
||||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-redirect-web-secure.redirectscheme.scheme=https'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-redirect-web-secure'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.rule=Host(`${SEXY_TRAEFIK_HOST}`)'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web.entrypoints=web'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.rule=Host(`${SEXY_TRAEFIK_HOST}`)'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.tls.certresolver=resolver'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.entrypoints=web-secure'
|
|
||||||
- 'traefik.http.middlewares.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure-compress.compress=true'
|
|
||||||
- 'traefik.http.routers.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.middlewares=${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure-compress'
|
|
||||||
- 'traefik.http.services.${SEXY_COMPOSE_PROJECT_NAME:-sexy}-frontend-web-secure.loadbalancer.server.port=3000'
|
|
||||||
- 'traefik.docker.network=compose_network'
|
|
||||||
182
compose.yml
182
compose.yml
@@ -1,183 +1,91 @@
|
|||||||
|
name: sexy
|
||||||
services:
|
services:
|
||||||
# PostgreSQL Database (local only)
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_postgres
|
container_name: sexy_postgres
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
ports:
|
||||||
- sexy-network
|
- "5432:5432"
|
||||||
volumes:
|
volumes:
|
||||||
- postgres-data:/var/lib/postgresql/data
|
- postgres_data:/var/lib/postgresql/data
|
||||||
environment:
|
environment:
|
||||||
POSTGRES_DB: ${DB_DATABASE:-sexy}
|
POSTGRES_DB: sexy
|
||||||
POSTGRES_USER: ${DB_USER:-sexy}
|
POSTGRES_USER: sexy
|
||||||
POSTGRES_PASSWORD: ${DB_PASSWORD:-sexy}
|
POSTGRES_PASSWORD: sexy
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD-SHELL", "pg_isready -U ${DB_USER:-sexy}"]
|
test: ["CMD-SHELL", "pg_isready -U sexy"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
|
||||||
# Redis Cache (local only)
|
|
||||||
redis:
|
redis:
|
||||||
image: redis:7-alpine
|
image: redis:7-alpine
|
||||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_redis
|
container_name: sexy_redis
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
ports:
|
||||||
- sexy-network
|
- "6379:6379"
|
||||||
volumes:
|
volumes:
|
||||||
- redis-data:/data
|
- redis_data:/data
|
||||||
command: redis-server --appendonly yes
|
command: redis-server --appendonly yes
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "redis-cli", "ping"]
|
test: ["CMD", "redis-cli", "ping"]
|
||||||
interval: 10s
|
interval: 10s
|
||||||
timeout: 5s
|
timeout: 5s
|
||||||
retries: 5
|
retries: 5
|
||||||
|
backend:
|
||||||
# Directus CMS
|
build:
|
||||||
directus:
|
context: .
|
||||||
image: ${SEXY_DIRECTUS_IMAGE:-directus/directus:11}
|
dockerfile: Dockerfile.backend
|
||||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_api
|
container_name: sexy_backend
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
networks:
|
|
||||||
- sexy-network
|
|
||||||
ports:
|
ports:
|
||||||
- "8055:8055"
|
- "4000:4000"
|
||||||
volumes:
|
volumes:
|
||||||
- directus-uploads:/directus/uploads
|
- uploads_data:/data/uploads
|
||||||
- ${SEXY_DIRECTUS_BUNDLE:-./packages/bundle}:/directus/extensions/sexy.pivoine.art
|
|
||||||
environment:
|
environment:
|
||||||
# Database
|
DATABASE_URL: postgresql://sexy:sexy@sexy_postgres:5432/sexy
|
||||||
DB_CLIENT: pg
|
REDIS_URL: redis://sexy_redis:6379
|
||||||
DB_HOST: ${CORE_DB_HOST:-postgres}
|
UPLOAD_DIR: /data/uploads
|
||||||
DB_PORT: ${CORE_DB_PORT:-5432}
|
CORS_ORIGIN: http://localhost:3000
|
||||||
DB_DATABASE: ${SEXY_DB_NAME:-sexy}
|
PORT: 4000
|
||||||
DB_USER: ${DB_USER:-sexy}
|
NODE_ENV: production
|
||||||
DB_PASSWORD: ${DB_PASSWORD:-sexy}
|
COOKIE_SECRET: change-me-in-production
|
||||||
|
SMTP_HOST: localhost
|
||||||
# General
|
SMTP_PORT: 587
|
||||||
SECRET: ${SEXY_DIRECTUS_SECRET:-replace-with-random-secret-min-32-chars}
|
EMAIL_FROM: noreply@sexy.pivoine.art
|
||||||
ADMIN_EMAIL: ${ADMIN_EMAIL:-admin@sexy.pivoine.art}
|
PUBLIC_URL: http://localhost:3000
|
||||||
ADMIN_PASSWORD: ${ADMIN_PASSWORD:-admin}
|
|
||||||
PUBLIC_URL: ${SEXY_PUBLIC_URL:-http://localhost:8055}
|
|
||||||
|
|
||||||
# Cache
|
|
||||||
CACHE_ENABLED: ${SEXY_CACHE_ENABLED:-true}
|
|
||||||
CACHE_AUTO_PURGE: ${SEXY_CACHE_AUTO_PURGE:-true}
|
|
||||||
CACHE_STORE: redis
|
|
||||||
REDIS: redis://${CORE_REDIS_HOST:-redis}:${CORE_REDIS_PORT:-6379}
|
|
||||||
|
|
||||||
# CORS
|
|
||||||
CORS_ENABLED: ${SEXY_CORS_ENABLED:-true}
|
|
||||||
CORS_ORIGIN: ${SEXY_CORS_ORIGIN:-http://localhost:3000}
|
|
||||||
|
|
||||||
# Security
|
|
||||||
SESSION_COOKIE_SECURE: ${SEXY_SESSION_COOKIE_SECURE:-false}
|
|
||||||
SESSION_COOKIE_SAME_SITE: ${SEXY_SESSION_COOKIE_SAME_SITE:-lax}
|
|
||||||
SESSION_COOKIE_DOMAIN: ${SEXY_SESSION_COOKIE_DOMAIN:-localhost}
|
|
||||||
|
|
||||||
# Extensions
|
|
||||||
EXTENSIONS_PATH: ${SEXY_EXTENSIONS_PATH:-/directus/extensions}
|
|
||||||
EXTENSIONS_AUTO_RELOAD: ${SEXY_EXTENSIONS_AUTO_RELOAD:-true}
|
|
||||||
|
|
||||||
# WebSockets
|
|
||||||
WEBSOCKETS_ENABLED: ${SEXY_WEBSOCKETS_ENABLED:-true}
|
|
||||||
|
|
||||||
# Email (optional for local dev)
|
|
||||||
EMAIL_TRANSPORT: ${EMAIL_TRANSPORT:-sendmail}
|
|
||||||
EMAIL_FROM: ${EMAIL_FROM:-noreply@sexy.pivoine.art}
|
|
||||||
EMAIL_SMTP_HOST: ${EMAIL_SMTP_HOST:-}
|
|
||||||
EMAIL_SMTP_PORT: ${EMAIL_SMTP_PORT:-587}
|
|
||||||
EMAIL_SMTP_USER: ${EMAIL_SMTP_USER:-}
|
|
||||||
EMAIL_SMTP_PASSWORD: ${EMAIL_SMTP_PASSWORD:-}
|
|
||||||
|
|
||||||
# User Registration & Password Reset URLs
|
|
||||||
USER_REGISTER_URL_ALLOW_LIST: ${SEXY_USER_REGISTER_URL_ALLOW_LIST:-http://localhost:3000}
|
|
||||||
PASSWORD_RESET_URL_ALLOW_LIST: ${SEXY_PASSWORD_RESET_URL_ALLOW_LIST:-http://localhost:3000}
|
|
||||||
|
|
||||||
# Content Security Policy
|
|
||||||
CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC: ${SEXY_CONTENT_SECURITY_POLICY_DIRECTIVES__FRAME_SRC:-}
|
|
||||||
|
|
||||||
# Timezone
|
|
||||||
TZ: ${TIMEZONE:-Europe/Amsterdam}
|
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
postgres:
|
postgres:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
redis:
|
redis:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
|
|
||||||
healthcheck:
|
healthcheck:
|
||||||
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8055/server/health"]
|
test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4000/health"]
|
||||||
interval: 30s
|
interval: 30s
|
||||||
timeout: 10s
|
timeout: 10s
|
||||||
retries: 3
|
retries: 3
|
||||||
start_period: 40s
|
start_period: 20s
|
||||||
|
|
||||||
# Frontend (local development - optional, usually run via pnpm dev)
|
|
||||||
frontend:
|
frontend:
|
||||||
image: ${SEXY_FRONTEND_IMAGE:-ghcr.io/valknarxxx/sexy:latest}
|
build:
|
||||||
container_name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_frontend
|
context: .
|
||||||
|
dockerfile: Dockerfile
|
||||||
|
container_name: sexy_frontend
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
user: node
|
|
||||||
working_dir: /home/node/app/packages/frontend
|
|
||||||
networks:
|
|
||||||
- sexy-network
|
|
||||||
ports:
|
ports:
|
||||||
- "3000:3000"
|
- "3000:3000"
|
||||||
environment:
|
environment:
|
||||||
# Node
|
NODE_ENV: production
|
||||||
NODE_ENV: ${NODE_ENV:-development}
|
|
||||||
PORT: 3000
|
PORT: 3000
|
||||||
HOST: 0.0.0.0
|
HOST: 0.0.0.0
|
||||||
|
PUBLIC_API_URL: http://sexy_backend:4000
|
||||||
# Public environment variables
|
PUBLIC_URL: http://localhost:3000
|
||||||
PUBLIC_API_URL: ${SEXY_FRONTEND_PUBLIC_API_URL:-http://localhost:8055}
|
|
||||||
PUBLIC_URL: ${SEXY_FRONTEND_PUBLIC_URL:-http://localhost:3000}
|
|
||||||
PUBLIC_UMAMI_ID: ${SEXY_FRONTEND_PUBLIC_UMAMI_ID:-}
|
|
||||||
|
|
||||||
# Letterspace newsletter integration
|
|
||||||
LETTERSPACE_API_URL: ${SEXY_FRONTEND_LETTERSPACE_API_URL:-}
|
|
||||||
LETTERSPACE_API_KEY: ${SEXY_FRONTEND_LETTERSPACE_API_KEY:-}
|
|
||||||
LETTERSPACE_LIST_ID: ${SEXY_FRONTEND_LETTERSPACE_LIST_ID:-}
|
|
||||||
|
|
||||||
# Timezone
|
|
||||||
TZ: ${TIMEZONE:-Europe/Amsterdam}
|
|
||||||
|
|
||||||
volumes:
|
|
||||||
- ${SEXY_FRONTEND_PATH:-./}:/home/node/app
|
|
||||||
|
|
||||||
command: ["node", "build/index.js"]
|
|
||||||
|
|
||||||
depends_on:
|
depends_on:
|
||||||
- directus
|
backend:
|
||||||
|
condition: service_healthy
|
||||||
healthcheck:
|
|
||||||
test: ["CMD", "node", "-e", "require('http').get('http://localhost:3000/', (r) => {process.exit(r.statusCode === 200 ? 0 : 1)})"]
|
|
||||||
interval: 30s
|
|
||||||
timeout: 3s
|
|
||||||
retries: 3
|
|
||||||
start_period: 40s
|
|
||||||
|
|
||||||
# Uncomment to run frontend in development mode with live reload
|
|
||||||
# build:
|
|
||||||
# context: .
|
|
||||||
# dockerfile: Dockerfile
|
|
||||||
# volumes:
|
|
||||||
# - ./packages/frontend:/home/node/app/packages/frontend
|
|
||||||
# - /home/node/app/packages/frontend/node_modules
|
|
||||||
# environment:
|
|
||||||
# NODE_ENV: development
|
|
||||||
|
|
||||||
networks:
|
|
||||||
sexy-network:
|
|
||||||
driver: bridge
|
|
||||||
name: ${SEXY_COMPOSE_PROJECT_NAME:-sexy}_network
|
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
directus-uploads:
|
uploads_data:
|
||||||
driver: local
|
driver: local
|
||||||
postgres-data:
|
postgres_data:
|
||||||
driver: local
|
driver: local
|
||||||
redis-data:
|
redis_data:
|
||||||
driver: local
|
driver: local
|
||||||
|
|||||||
2817
directus.yaml
2817
directus.yaml
File diff suppressed because it is too large
Load Diff
59
eslint.config.js
Normal file
59
eslint.config.js
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import js from "@eslint/js";
|
||||||
|
import ts from "typescript-eslint";
|
||||||
|
import svelte from "eslint-plugin-svelte";
|
||||||
|
import prettier from "eslint-config-prettier";
|
||||||
|
import globals from "globals";
|
||||||
|
|
||||||
|
export default ts.config(
|
||||||
|
js.configs.recommended,
|
||||||
|
...ts.configs.recommended,
|
||||||
|
...svelte.configs["flat/recommended"],
|
||||||
|
prettier,
|
||||||
|
...svelte.configs["flat/prettier"],
|
||||||
|
{
|
||||||
|
languageOptions: {
|
||||||
|
globals: {
|
||||||
|
...globals.browser,
|
||||||
|
...globals.node,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
files: ["**/*.svelte"],
|
||||||
|
languageOptions: {
|
||||||
|
parserOptions: {
|
||||||
|
parser: ts.parser,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
rules: {
|
||||||
|
// Allow unused vars prefixed with _ (common pattern for intentional ignores)
|
||||||
|
"@typescript-eslint/no-unused-vars": [
|
||||||
|
"error",
|
||||||
|
{ argsIgnorePattern: "^_", varsIgnorePattern: "^_" },
|
||||||
|
],
|
||||||
|
// Allow explicit any sparingly — we're adults here
|
||||||
|
"@typescript-eslint/no-explicit-any": "warn",
|
||||||
|
// Enforce consistent type imports
|
||||||
|
"@typescript-eslint/consistent-type-imports": [
|
||||||
|
"error",
|
||||||
|
{ prefer: "type-imports", fixStyle: "inline-type-imports" },
|
||||||
|
],
|
||||||
|
// This rule is meant for onNavigate() callbacks only; standard SvelteKit href/goto is fine
|
||||||
|
"svelte/no-navigation-without-resolve": "off",
|
||||||
|
// {@html} is used intentionally for trusted content (e.g. legal page)
|
||||||
|
"svelte/no-at-html-tags": "warn",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
{
|
||||||
|
ignores: [
|
||||||
|
"**/build/",
|
||||||
|
"**/.svelte-kit/",
|
||||||
|
"**/dist/",
|
||||||
|
"**/node_modules/",
|
||||||
|
"**/migrations/",
|
||||||
|
"packages/buttplug/**",
|
||||||
|
],
|
||||||
|
},
|
||||||
|
);
|
||||||
@@ -1,177 +0,0 @@
|
|||||||
-- Gamification System Schema for Sexy Recordings Platform
|
|
||||||
-- Created: 2025-10-28
|
|
||||||
-- Description: Recording-focused gamification with time-weighted scoring
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Table: sexy_recording_plays
|
|
||||||
-- ====================
|
|
||||||
-- Tracks when users play recordings (similar to video plays)
|
|
||||||
CREATE TABLE IF NOT EXISTS sexy_recording_plays (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
user_id UUID NOT NULL REFERENCES directus_users(id) ON DELETE CASCADE,
|
|
||||||
recording_id UUID NOT NULL REFERENCES sexy_recordings(id) ON DELETE CASCADE,
|
|
||||||
duration_played INTEGER, -- Duration played in milliseconds
|
|
||||||
completed BOOLEAN DEFAULT FALSE, -- True if >= 90% watched
|
|
||||||
date_created TIMESTAMP WITH TIME ZONE DEFAULT NOW(),
|
|
||||||
date_updated TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_recording_plays_user ON sexy_recording_plays(user_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_recording_plays_recording ON sexy_recording_plays(recording_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_recording_plays_date ON sexy_recording_plays(date_created);
|
|
||||||
|
|
||||||
COMMENT ON TABLE sexy_recording_plays IS 'Tracks user playback of recordings for analytics and gamification';
|
|
||||||
COMMENT ON COLUMN sexy_recording_plays.completed IS 'True if user watched at least 90% of the recording';
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Table: sexy_user_points
|
|
||||||
-- ====================
|
|
||||||
-- Tracks individual point-earning actions with timestamps for time-weighted scoring
|
|
||||||
CREATE TABLE IF NOT EXISTS sexy_user_points (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
user_id UUID NOT NULL REFERENCES directus_users(id) ON DELETE CASCADE,
|
|
||||||
action VARCHAR(50) NOT NULL, -- e.g., "RECORDING_CREATE", "RECORDING_PLAY", "COMMENT_CREATE"
|
|
||||||
points INTEGER NOT NULL, -- Raw points earned
|
|
||||||
recording_id UUID REFERENCES sexy_recordings(id) ON DELETE SET NULL, -- Optional reference
|
|
||||||
date_created TIMESTAMP WITH TIME ZONE DEFAULT NOW()
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_points_user ON sexy_user_points(user_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_points_date ON sexy_user_points(date_created);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_points_action ON sexy_user_points(action);
|
|
||||||
|
|
||||||
COMMENT ON TABLE sexy_user_points IS 'Individual point-earning actions for gamification system';
|
|
||||||
COMMENT ON COLUMN sexy_user_points.action IS 'Type of action: RECORDING_CREATE, RECORDING_PLAY, RECORDING_COMPLETE, COMMENT_CREATE, RECORDING_FEATURED';
|
|
||||||
COMMENT ON COLUMN sexy_user_points.points IS 'Raw points before time-weighted decay calculation';
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Table: sexy_achievements
|
|
||||||
-- ====================
|
|
||||||
-- Predefined achievement definitions
|
|
||||||
CREATE TABLE IF NOT EXISTS sexy_achievements (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
code VARCHAR(50) UNIQUE NOT NULL, -- Unique identifier (e.g., "first_recording", "recording_100")
|
|
||||||
name VARCHAR(255) NOT NULL, -- Display name
|
|
||||||
description TEXT, -- Achievement description
|
|
||||||
icon VARCHAR(255), -- Icon identifier or emoji
|
|
||||||
category VARCHAR(50) NOT NULL, -- e.g., "recordings", "playback", "social", "special"
|
|
||||||
required_count INTEGER, -- Number of actions needed to unlock
|
|
||||||
points_reward INTEGER DEFAULT 0, -- Bonus points awarded upon unlock
|
|
||||||
sort INTEGER DEFAULT 0, -- Display order
|
|
||||||
status VARCHAR(20) DEFAULT 'published' -- published, draft, archived
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_achievements_category ON sexy_achievements(category);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_achievements_code ON sexy_achievements(code);
|
|
||||||
|
|
||||||
COMMENT ON TABLE sexy_achievements IS 'Predefined achievement definitions for gamification';
|
|
||||||
COMMENT ON COLUMN sexy_achievements.code IS 'Unique code used in backend logic (e.g., first_recording, play_100)';
|
|
||||||
COMMENT ON COLUMN sexy_achievements.category IS 'Achievement category: recordings, playback, social, special';
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Table: sexy_user_achievements
|
|
||||||
-- ====================
|
|
||||||
-- Junction table tracking unlocked achievements per user
|
|
||||||
CREATE TABLE IF NOT EXISTS sexy_user_achievements (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
user_id UUID NOT NULL REFERENCES directus_users(id) ON DELETE CASCADE,
|
|
||||||
achievement_id UUID NOT NULL REFERENCES sexy_achievements(id) ON DELETE CASCADE,
|
|
||||||
progress INTEGER DEFAULT 0, -- Current progress toward unlocking
|
|
||||||
date_unlocked TIMESTAMP WITH TIME ZONE, -- NULL if not yet unlocked
|
|
||||||
UNIQUE(user_id, achievement_id)
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_user ON sexy_user_achievements(user_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_achievement ON sexy_user_achievements(achievement_id);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_achievements_unlocked ON sexy_user_achievements(date_unlocked) WHERE date_unlocked IS NOT NULL;
|
|
||||||
|
|
||||||
COMMENT ON TABLE sexy_user_achievements IS 'Tracks which achievements users have unlocked';
|
|
||||||
COMMENT ON COLUMN sexy_user_achievements.progress IS 'Current progress (e.g., 7/10 recordings created)';
|
|
||||||
COMMENT ON COLUMN sexy_user_achievements.date_unlocked IS 'NULL if achievement not yet unlocked';
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Table: sexy_user_stats
|
|
||||||
-- ====================
|
|
||||||
-- Cached aggregate statistics for efficient leaderboard queries
|
|
||||||
CREATE TABLE IF NOT EXISTS sexy_user_stats (
|
|
||||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
|
||||||
user_id UUID UNIQUE NOT NULL REFERENCES directus_users(id) ON DELETE CASCADE,
|
|
||||||
total_raw_points INTEGER DEFAULT 0, -- Sum of all points (no decay)
|
|
||||||
total_weighted_points NUMERIC(10,2) DEFAULT 0, -- Time-weighted score for rankings
|
|
||||||
recordings_count INTEGER DEFAULT 0, -- Number of published recordings
|
|
||||||
playbacks_count INTEGER DEFAULT 0, -- Number of recordings played
|
|
||||||
comments_count INTEGER DEFAULT 0, -- Number of comments on recordings
|
|
||||||
achievements_count INTEGER DEFAULT 0, -- Number of unlocked achievements
|
|
||||||
last_updated TIMESTAMP WITH TIME ZONE DEFAULT NOW() -- Cache timestamp
|
|
||||||
);
|
|
||||||
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_stats_weighted ON sexy_user_stats(total_weighted_points DESC);
|
|
||||||
CREATE INDEX IF NOT EXISTS idx_user_stats_user ON sexy_user_stats(user_id);
|
|
||||||
|
|
||||||
COMMENT ON TABLE sexy_user_stats IS 'Cached user statistics for fast leaderboard queries';
|
|
||||||
COMMENT ON COLUMN sexy_user_stats.total_raw_points IS 'Sum of all points without time decay';
|
|
||||||
COMMENT ON COLUMN sexy_user_stats.total_weighted_points IS 'Time-weighted score using exponential decay (λ=0.005)';
|
|
||||||
COMMENT ON COLUMN sexy_user_stats.last_updated IS 'Timestamp for cache invalidation';
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Insert Initial Achievements
|
|
||||||
-- ====================
|
|
||||||
|
|
||||||
-- 🎬 Recordings (Creation)
|
|
||||||
INSERT INTO sexy_achievements (code, name, description, icon, category, required_count, points_reward, sort) VALUES
|
|
||||||
('first_recording', 'First Recording', 'Create your first recording', '🎬', 'recordings', 1, 50, 1),
|
|
||||||
('recording_10', 'Recording Enthusiast', 'Create 10 recordings', '📹', 'recordings', 10, 100, 2),
|
|
||||||
('recording_50', 'Prolific Creator', 'Create 50 recordings', '🎥', 'recordings', 50, 500, 3),
|
|
||||||
('recording_100', 'Recording Master', 'Create 100 recordings', '🏆', 'recordings', 100, 1000, 4),
|
|
||||||
('featured_recording', 'Featured Creator', 'Get a recording featured', '⭐', 'recordings', 1, 200, 5)
|
|
||||||
ON CONFLICT (code) DO NOTHING;
|
|
||||||
|
|
||||||
-- ▶️ Playback (Consumption)
|
|
||||||
INSERT INTO sexy_achievements (code, name, description, icon, category, required_count, points_reward, sort) VALUES
|
|
||||||
('first_play', 'First Play', 'Play your first recording', '▶️', 'playback', 1, 25, 10),
|
|
||||||
('play_100', 'Active Player', 'Play 100 recordings', '🎮', 'playback', 100, 250, 11),
|
|
||||||
('play_500', 'Playback Enthusiast', 'Play 500 recordings', '🔥', 'playback', 500, 1000, 12),
|
|
||||||
('completionist_10', 'Completionist', 'Complete 10 recordings to 90%+', '✅', 'playback', 10, 100, 13),
|
|
||||||
('completionist_100', 'Super Completionist', 'Complete 100 recordings', '💯', 'playback', 100, 500, 14)
|
|
||||||
ON CONFLICT (code) DO NOTHING;
|
|
||||||
|
|
||||||
-- 💬 Social (Community)
|
|
||||||
INSERT INTO sexy_achievements (code, name, description, icon, category, required_count, points_reward, sort) VALUES
|
|
||||||
('first_comment', 'First Comment', 'Leave your first comment', '💬', 'social', 1, 25, 20),
|
|
||||||
('comment_50', 'Conversationalist', 'Leave 50 comments', '💭', 'social', 50, 200, 21),
|
|
||||||
('comment_250', 'Community Voice', 'Leave 250 comments', '📣', 'social', 250, 750, 22)
|
|
||||||
ON CONFLICT (code) DO NOTHING;
|
|
||||||
|
|
||||||
-- ⭐ Special (Milestones)
|
|
||||||
INSERT INTO sexy_achievements (code, name, description, icon, category, required_count, points_reward, sort) VALUES
|
|
||||||
('early_adopter', 'Early Adopter', 'Join in the first month', '🚀', 'special', 1, 500, 30),
|
|
||||||
('one_year', 'One Year Anniversary', 'Be a member for 1 year', '🎂', 'special', 1, 1000, 31),
|
|
||||||
('balanced_creator', 'Balanced Creator', '50 recordings + 100 plays', '⚖️', 'special', 1, 500, 32),
|
|
||||||
('top_10_rank', 'Top 10 Leaderboard', 'Reach top 10 on leaderboard', '🏅', 'special', 1, 2000, 33)
|
|
||||||
ON CONFLICT (code) DO NOTHING;
|
|
||||||
|
|
||||||
-- ====================
|
|
||||||
-- Verification Queries
|
|
||||||
-- ====================
|
|
||||||
|
|
||||||
-- Count tables created
|
|
||||||
SELECT
|
|
||||||
'sexy_recording_plays' as table_name,
|
|
||||||
COUNT(*) as row_count
|
|
||||||
FROM sexy_recording_plays
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'sexy_user_points', COUNT(*) FROM sexy_user_points
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'sexy_achievements', COUNT(*) FROM sexy_achievements
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'sexy_user_achievements', COUNT(*) FROM sexy_user_achievements
|
|
||||||
UNION ALL
|
|
||||||
SELECT 'sexy_user_stats', COUNT(*) FROM sexy_user_stats;
|
|
||||||
|
|
||||||
-- Show created achievements
|
|
||||||
SELECT
|
|
||||||
category,
|
|
||||||
COUNT(*) as achievement_count
|
|
||||||
FROM sexy_achievements
|
|
||||||
GROUP BY category
|
|
||||||
ORDER BY category;
|
|
||||||
75
package.json
75
package.json
@@ -1,30 +1,49 @@
|
|||||||
{
|
{
|
||||||
"name": "sexy.pivoine.art",
|
"name": "sexy.pivoine.art",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"description": "",
|
"description": "",
|
||||||
"main": "index.js",
|
"type": "module",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "echo \"Error: no test specified\" && exit 1",
|
"test": "echo \"Error: no test specified\" && exit 1",
|
||||||
"build:bundle": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/bundle build",
|
"build:frontend": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/frontend build",
|
||||||
"build:frontend": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/frontend build",
|
"build:backend": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/backend build",
|
||||||
"dev:data": "cd ../compose/data && docker compose up -d",
|
"dev:data": "docker compose up -d postgres redis",
|
||||||
"dev:directus": "cd ../compose/sexy && docker compose --env-file=.env.local up -d directus",
|
"dev:backend": "pnpm --filter @sexy.pivoine.art/backend dev",
|
||||||
"dev": "pnpm dev:data && pnpm dev:directus && pnpm --filter @sexy.pivoine.art/frontend dev"
|
"dev": "pnpm dev:data && pnpm dev:backend & pnpm --filter @sexy.pivoine.art/frontend dev",
|
||||||
},
|
"lint": "eslint .",
|
||||||
"keywords": [],
|
"lint:fix": "eslint . --fix",
|
||||||
"author": "",
|
"format": "prettier --write .",
|
||||||
"license": "ISC",
|
"format:check": "prettier --check .",
|
||||||
"packageManager": "pnpm@10.19.0",
|
"check": "pnpm -r --filter=!sexy.pivoine.art check"
|
||||||
"pnpm": {
|
},
|
||||||
"onlyBuiltDependencies": [
|
"keywords": [],
|
||||||
"es5-ext",
|
"author": {
|
||||||
"esbuild",
|
"name": "Valknar",
|
||||||
"svelte-preprocess",
|
"email": "valknar@pivoine.art"
|
||||||
"wasm-pack"
|
},
|
||||||
],
|
"license": "MIT",
|
||||||
"ignoredBuiltDependencies": [
|
"packageManager": "pnpm@10.19.0",
|
||||||
"@tailwindcss/oxide",
|
"pnpm": {
|
||||||
"node-sass"
|
"onlyBuiltDependencies": [
|
||||||
]
|
"argon2",
|
||||||
}
|
"es5-ext",
|
||||||
|
"esbuild",
|
||||||
|
"svelte-preprocess",
|
||||||
|
"wasm-pack"
|
||||||
|
],
|
||||||
|
"ignoredBuiltDependencies": [
|
||||||
|
"@tailwindcss/oxide",
|
||||||
|
"node-sass"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@eslint/js": "^10.0.1",
|
||||||
|
"eslint": "^10.0.2",
|
||||||
|
"eslint-config-prettier": "^10.1.8",
|
||||||
|
"eslint-plugin-svelte": "^3.15.0",
|
||||||
|
"globals": "^17.4.0",
|
||||||
|
"prettier": "^3.8.1",
|
||||||
|
"prettier-plugin-svelte": "^3.5.1",
|
||||||
|
"typescript-eslint": "^8.56.1"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
10
packages/backend/drizzle.config.ts
Normal file
10
packages/backend/drizzle.config.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import { defineConfig } from "drizzle-kit";
|
||||||
|
|
||||||
|
export default defineConfig({
|
||||||
|
schema: "./src/db/schema/*.ts",
|
||||||
|
out: "./src/migrations",
|
||||||
|
dialect: "postgresql",
|
||||||
|
dbCredentials: {
|
||||||
|
url: process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy",
|
||||||
|
},
|
||||||
|
});
|
||||||
50
packages/backend/package.json
Normal file
50
packages/backend/package.json
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
{
|
||||||
|
"name": "@sexy.pivoine.art/backend",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"private": true,
|
||||||
|
"scripts": {
|
||||||
|
"dev": "UPLOAD_DIR=../../.data/uploads DATABASE_URL=postgresql://sexy:sexy@localhost:5432/sexy REDIS_URL=redis://localhost:6379 tsx watch src/index.ts",
|
||||||
|
"build": "tsc",
|
||||||
|
"start": "node dist/index.js",
|
||||||
|
"db:generate": "drizzle-kit generate",
|
||||||
|
"db:migrate": "drizzle-kit migrate",
|
||||||
|
"db:studio": "drizzle-kit studio",
|
||||||
|
"schema:migrate": "tsx src/scripts/migrate.ts",
|
||||||
|
"migrate": "tsx src/scripts/data-migration.ts",
|
||||||
|
"check": "tsc --noEmit"
|
||||||
|
},
|
||||||
|
"dependencies": {
|
||||||
|
"@sexy.pivoine.art/types": "workspace:*",
|
||||||
|
"@fastify/cookie": "^11.0.2",
|
||||||
|
"@fastify/cors": "^10.0.2",
|
||||||
|
"@fastify/multipart": "^9.0.3",
|
||||||
|
"@fastify/static": "^8.1.1",
|
||||||
|
"@pothos/core": "^4.4.0",
|
||||||
|
"@pothos/plugin-errors": "^4.2.0",
|
||||||
|
"argon2": "^0.43.0",
|
||||||
|
"drizzle-orm": "^0.44.1",
|
||||||
|
"fastify": "^5.4.0",
|
||||||
|
"fluent-ffmpeg": "^2.1.3",
|
||||||
|
"graphql": "^16.11.0",
|
||||||
|
"graphql-scalars": "^1.24.2",
|
||||||
|
"graphql-ws": "^6.0.4",
|
||||||
|
"graphql-yoga": "^5.13.4",
|
||||||
|
"ioredis": "^5.6.1",
|
||||||
|
"nanoid": "^3.3.11",
|
||||||
|
"nodemailer": "^7.0.3",
|
||||||
|
"pg": "^8.16.0",
|
||||||
|
"sharp": "^0.33.5",
|
||||||
|
"slugify": "^1.6.6",
|
||||||
|
"uuid": "^11.1.0"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@types/fluent-ffmpeg": "^2.1.27",
|
||||||
|
"@types/nodemailer": "^6.4.17",
|
||||||
|
"@types/pg": "^8.15.4",
|
||||||
|
"@types/sharp": "^0.32.0",
|
||||||
|
"@types/uuid": "^10.0.0",
|
||||||
|
"drizzle-kit": "^0.31.1",
|
||||||
|
"tsx": "^4.19.4",
|
||||||
|
"typescript": "^5.9.3"
|
||||||
|
}
|
||||||
|
}
|
||||||
11
packages/backend/src/db/connection.ts
Normal file
11
packages/backend/src/db/connection.ts
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
import { drizzle } from "drizzle-orm/node-postgres";
|
||||||
|
import { Pool } from "pg";
|
||||||
|
import * as schema from "./schema/index";
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
connectionString: process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy",
|
||||||
|
max: 20,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const db = drizzle(pool, { schema });
|
||||||
|
export type DB = typeof db;
|
||||||
32
packages/backend/src/db/schema/articles.ts
Normal file
32
packages/backend/src/db/schema/articles.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import { pgTable, text, timestamp, boolean, index, uniqueIndex } from "drizzle-orm/pg-core";
|
||||||
|
import { users } from "./users";
|
||||||
|
import { files } from "./files";
|
||||||
|
|
||||||
|
export const articles = pgTable(
|
||||||
|
"articles",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
slug: text("slug").notNull(),
|
||||||
|
title: text("title").notNull(),
|
||||||
|
excerpt: text("excerpt"),
|
||||||
|
content: text("content"),
|
||||||
|
image: text("image").references(() => files.id, { onDelete: "set null" }),
|
||||||
|
tags: text("tags").array().default([]),
|
||||||
|
publish_date: timestamp("publish_date").notNull().defaultNow(),
|
||||||
|
author: text("author").references(() => users.id, { onDelete: "set null" }),
|
||||||
|
category: text("category"),
|
||||||
|
featured: boolean("featured").default(false),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
uniqueIndex("articles_slug_idx").on(t.slug),
|
||||||
|
index("articles_publish_date_idx").on(t.publish_date),
|
||||||
|
index("articles_featured_idx").on(t.featured),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type Article = typeof articles.$inferSelect;
|
||||||
|
export type NewArticle = typeof articles.$inferInsert;
|
||||||
24
packages/backend/src/db/schema/comments.ts
Normal file
24
packages/backend/src/db/schema/comments.ts
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
import { pgTable, text, timestamp, index, integer } from "drizzle-orm/pg-core";
|
||||||
|
import { users } from "./users";
|
||||||
|
|
||||||
|
export const comments = pgTable(
|
||||||
|
"comments",
|
||||||
|
{
|
||||||
|
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
collection: text("collection").notNull(), // 'videos' | 'recordings'
|
||||||
|
item_id: text("item_id").notNull(),
|
||||||
|
comment: text("comment").notNull(),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("comments_collection_item_idx").on(t.collection, t.item_id),
|
||||||
|
index("comments_user_idx").on(t.user_id),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type Comment = typeof comments.$inferSelect;
|
||||||
|
export type NewComment = typeof comments.$inferInsert;
|
||||||
22
packages/backend/src/db/schema/files.ts
Normal file
22
packages/backend/src/db/schema/files.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import { pgTable, text, timestamp, bigint, integer, index } from "drizzle-orm/pg-core";
|
||||||
|
|
||||||
|
export const files = pgTable(
|
||||||
|
"files",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
title: text("title"),
|
||||||
|
description: text("description"),
|
||||||
|
filename: text("filename").notNull(),
|
||||||
|
mime_type: text("mime_type"),
|
||||||
|
filesize: bigint("filesize", { mode: "number" }),
|
||||||
|
duration: integer("duration"),
|
||||||
|
uploaded_by: text("uploaded_by"),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
},
|
||||||
|
(t) => [index("files_uploaded_by_idx").on(t.uploaded_by)],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type File = typeof files.$inferSelect;
|
||||||
|
export type NewFile = typeof files.$inferInsert;
|
||||||
93
packages/backend/src/db/schema/gamification.ts
Normal file
93
packages/backend/src/db/schema/gamification.ts
Normal file
@@ -0,0 +1,93 @@
|
|||||||
|
import {
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
integer,
|
||||||
|
real,
|
||||||
|
index,
|
||||||
|
pgEnum,
|
||||||
|
uniqueIndex,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { users } from "./users";
|
||||||
|
import { recordings } from "./recordings";
|
||||||
|
|
||||||
|
export const achievementStatusEnum = pgEnum("achievement_status", ["draft", "published"]);
|
||||||
|
|
||||||
|
export const achievements = pgTable(
|
||||||
|
"achievements",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
code: text("code").notNull(),
|
||||||
|
name: text("name").notNull(),
|
||||||
|
description: text("description"),
|
||||||
|
icon: text("icon"),
|
||||||
|
category: text("category"),
|
||||||
|
required_count: integer("required_count").notNull().default(1),
|
||||||
|
points_reward: integer("points_reward").notNull().default(0),
|
||||||
|
status: achievementStatusEnum("status").notNull().default("published"),
|
||||||
|
sort: integer("sort").default(0),
|
||||||
|
},
|
||||||
|
(t) => [uniqueIndex("achievements_code_idx").on(t.code)],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const user_achievements = pgTable(
|
||||||
|
"user_achievements",
|
||||||
|
{
|
||||||
|
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
achievement_id: text("achievement_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => achievements.id, { onDelete: "cascade" }),
|
||||||
|
progress: integer("progress").default(0),
|
||||||
|
date_unlocked: timestamp("date_unlocked"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("user_achievements_user_idx").on(t.user_id),
|
||||||
|
uniqueIndex("user_achievements_unique_idx").on(t.user_id, t.achievement_id),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const user_points = pgTable(
|
||||||
|
"user_points",
|
||||||
|
{
|
||||||
|
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
action: text("action").notNull(),
|
||||||
|
points: integer("points").notNull(),
|
||||||
|
recording_id: text("recording_id").references(() => recordings.id, {
|
||||||
|
onDelete: "set null",
|
||||||
|
}),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("user_points_user_idx").on(t.user_id),
|
||||||
|
index("user_points_date_idx").on(t.date_created),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const user_stats = pgTable(
|
||||||
|
"user_stats",
|
||||||
|
{
|
||||||
|
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
total_raw_points: integer("total_raw_points").default(0),
|
||||||
|
total_weighted_points: real("total_weighted_points").default(0),
|
||||||
|
recordings_count: integer("recordings_count").default(0),
|
||||||
|
playbacks_count: integer("playbacks_count").default(0),
|
||||||
|
comments_count: integer("comments_count").default(0),
|
||||||
|
achievements_count: integer("achievements_count").default(0),
|
||||||
|
last_updated: timestamp("last_updated").defaultNow(),
|
||||||
|
},
|
||||||
|
(t) => [uniqueIndex("user_stats_user_idx").on(t.user_id)],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type Achievement = typeof achievements.$inferSelect;
|
||||||
|
export type UserStats = typeof user_stats.$inferSelect;
|
||||||
7
packages/backend/src/db/schema/index.ts
Normal file
7
packages/backend/src/db/schema/index.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
export * from "./files";
|
||||||
|
export * from "./users";
|
||||||
|
export * from "./videos";
|
||||||
|
export * from "./articles";
|
||||||
|
export * from "./recordings";
|
||||||
|
export * from "./comments";
|
||||||
|
export * from "./gamification";
|
||||||
73
packages/backend/src/db/schema/recordings.ts
Normal file
73
packages/backend/src/db/schema/recordings.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import {
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
boolean,
|
||||||
|
integer,
|
||||||
|
pgEnum,
|
||||||
|
index,
|
||||||
|
uniqueIndex,
|
||||||
|
jsonb,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { users } from "./users";
|
||||||
|
import { videos } from "./videos";
|
||||||
|
|
||||||
|
export const recordingStatusEnum = pgEnum("recording_status", ["draft", "published"]);
|
||||||
|
|
||||||
|
export const recordings = pgTable(
|
||||||
|
"recordings",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
title: text("title").notNull(),
|
||||||
|
description: text("description"),
|
||||||
|
slug: text("slug").notNull(),
|
||||||
|
duration: integer("duration").notNull(),
|
||||||
|
events: jsonb("events").$type<object[]>().default([]),
|
||||||
|
device_info: jsonb("device_info").$type<object[]>().default([]),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
status: recordingStatusEnum("status").notNull().default("draft"),
|
||||||
|
tags: text("tags").array().default([]),
|
||||||
|
linked_video: text("linked_video").references(() => videos.id, {
|
||||||
|
onDelete: "set null",
|
||||||
|
}),
|
||||||
|
featured: boolean("featured").default(false),
|
||||||
|
public: boolean("public").default(false),
|
||||||
|
original_recording_id: text("original_recording_id"),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
uniqueIndex("recordings_slug_idx").on(t.slug),
|
||||||
|
index("recordings_user_idx").on(t.user_id),
|
||||||
|
index("recordings_status_idx").on(t.status),
|
||||||
|
index("recordings_public_idx").on(t.public),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const recording_plays = pgTable(
|
||||||
|
"recording_plays",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
recording_id: text("recording_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => recordings.id, { onDelete: "cascade" }),
|
||||||
|
user_id: text("user_id").references(() => users.id, { onDelete: "set null" }),
|
||||||
|
duration_played: integer("duration_played").default(0),
|
||||||
|
completed: boolean("completed").default(false),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("recording_plays_recording_idx").on(t.recording_id),
|
||||||
|
index("recording_plays_user_idx").on(t.user_id),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type Recording = typeof recordings.$inferSelect;
|
||||||
|
export type NewRecording = typeof recordings.$inferInsert;
|
||||||
63
packages/backend/src/db/schema/users.ts
Normal file
63
packages/backend/src/db/schema/users.ts
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import {
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
pgEnum,
|
||||||
|
boolean,
|
||||||
|
index,
|
||||||
|
uniqueIndex,
|
||||||
|
integer,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { files } from "./files";
|
||||||
|
|
||||||
|
export const roleEnum = pgEnum("user_role", ["model", "viewer", "admin"]);
|
||||||
|
|
||||||
|
export const users = pgTable(
|
||||||
|
"users",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
email: text("email").notNull(),
|
||||||
|
password_hash: text("password_hash").notNull(),
|
||||||
|
first_name: text("first_name"),
|
||||||
|
last_name: text("last_name"),
|
||||||
|
artist_name: text("artist_name"),
|
||||||
|
slug: text("slug"),
|
||||||
|
description: text("description"),
|
||||||
|
tags: text("tags").array().default([]),
|
||||||
|
role: roleEnum("role").notNull().default("viewer"),
|
||||||
|
avatar: text("avatar").references(() => files.id, { onDelete: "set null" }),
|
||||||
|
banner: text("banner").references(() => files.id, { onDelete: "set null" }),
|
||||||
|
is_admin: boolean("is_admin").notNull().default(false),
|
||||||
|
email_verified: boolean("email_verified").notNull().default(false),
|
||||||
|
email_verify_token: text("email_verify_token"),
|
||||||
|
password_reset_token: text("password_reset_token"),
|
||||||
|
password_reset_expiry: timestamp("password_reset_expiry"),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
uniqueIndex("users_email_idx").on(t.email),
|
||||||
|
uniqueIndex("users_slug_idx").on(t.slug),
|
||||||
|
index("users_role_idx").on(t.role),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const user_photos = pgTable(
|
||||||
|
"user_photos",
|
||||||
|
{
|
||||||
|
id: integer("id").primaryKey().generatedAlwaysAsIdentity(),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
file_id: text("file_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => files.id, { onDelete: "cascade" }),
|
||||||
|
sort: integer("sort").default(0),
|
||||||
|
},
|
||||||
|
(t) => [index("user_photos_user_idx").on(t.user_id)],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type User = typeof users.$inferSelect;
|
||||||
|
export type NewUser = typeof users.$inferInsert;
|
||||||
96
packages/backend/src/db/schema/videos.ts
Normal file
96
packages/backend/src/db/schema/videos.ts
Normal file
@@ -0,0 +1,96 @@
|
|||||||
|
import {
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
boolean,
|
||||||
|
integer,
|
||||||
|
index,
|
||||||
|
uniqueIndex,
|
||||||
|
primaryKey,
|
||||||
|
} from "drizzle-orm/pg-core";
|
||||||
|
import { users } from "./users";
|
||||||
|
import { files } from "./files";
|
||||||
|
|
||||||
|
export const videos = pgTable(
|
||||||
|
"videos",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
slug: text("slug").notNull(),
|
||||||
|
title: text("title").notNull(),
|
||||||
|
description: text("description"),
|
||||||
|
image: text("image").references(() => files.id, { onDelete: "set null" }),
|
||||||
|
movie: text("movie").references(() => files.id, { onDelete: "set null" }),
|
||||||
|
tags: text("tags").array().default([]),
|
||||||
|
upload_date: timestamp("upload_date").notNull().defaultNow(),
|
||||||
|
premium: boolean("premium").default(false),
|
||||||
|
featured: boolean("featured").default(false),
|
||||||
|
likes_count: integer("likes_count").default(0),
|
||||||
|
plays_count: integer("plays_count").default(0),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
uniqueIndex("videos_slug_idx").on(t.slug),
|
||||||
|
index("videos_upload_date_idx").on(t.upload_date),
|
||||||
|
index("videos_featured_idx").on(t.featured),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const video_models = pgTable(
|
||||||
|
"video_models",
|
||||||
|
{
|
||||||
|
video_id: text("video_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => videos.id, { onDelete: "cascade" }),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
},
|
||||||
|
(t) => [primaryKey({ columns: [t.video_id, t.user_id] })],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const video_likes = pgTable(
|
||||||
|
"video_likes",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
video_id: text("video_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => videos.id, { onDelete: "cascade" }),
|
||||||
|
user_id: text("user_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => users.id, { onDelete: "cascade" }),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("video_likes_video_idx").on(t.video_id),
|
||||||
|
index("video_likes_user_idx").on(t.user_id),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export const video_plays = pgTable(
|
||||||
|
"video_plays",
|
||||||
|
{
|
||||||
|
id: text("id")
|
||||||
|
.primaryKey()
|
||||||
|
.$defaultFn(() => crypto.randomUUID()),
|
||||||
|
video_id: text("video_id")
|
||||||
|
.notNull()
|
||||||
|
.references(() => videos.id, { onDelete: "cascade" }),
|
||||||
|
user_id: text("user_id").references(() => users.id, { onDelete: "set null" }),
|
||||||
|
session_id: text("session_id"),
|
||||||
|
duration_watched: integer("duration_watched"),
|
||||||
|
completed: boolean("completed").default(false),
|
||||||
|
date_created: timestamp("date_created").notNull().defaultNow(),
|
||||||
|
date_updated: timestamp("date_updated"),
|
||||||
|
},
|
||||||
|
(t) => [
|
||||||
|
index("video_plays_video_idx").on(t.video_id),
|
||||||
|
index("video_plays_user_idx").on(t.user_id),
|
||||||
|
index("video_plays_date_idx").on(t.date_created),
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
export type Video = typeof videos.$inferSelect;
|
||||||
|
export type NewVideo = typeof videos.$inferInsert;
|
||||||
30
packages/backend/src/graphql/builder.ts
Normal file
30
packages/backend/src/graphql/builder.ts
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import SchemaBuilder from "@pothos/core";
|
||||||
|
import ErrorsPlugin from "@pothos/plugin-errors";
|
||||||
|
import type { DB } from "../db/connection";
|
||||||
|
import type { SessionUser } from "../lib/auth";
|
||||||
|
import type Redis from "ioredis";
|
||||||
|
import { GraphQLDateTime, GraphQLJSON } from "graphql-scalars";
|
||||||
|
|
||||||
|
export type Context = {
|
||||||
|
db: DB;
|
||||||
|
redis: InstanceType<typeof Redis>;
|
||||||
|
currentUser: SessionUser | null;
|
||||||
|
request: Request;
|
||||||
|
reply: unknown;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const builder = new SchemaBuilder<{
|
||||||
|
Context: Context;
|
||||||
|
Scalars: {
|
||||||
|
DateTime: { Input: Date; Output: Date };
|
||||||
|
JSON: { Input: unknown; Output: unknown };
|
||||||
|
};
|
||||||
|
}>({
|
||||||
|
plugins: [ErrorsPlugin],
|
||||||
|
});
|
||||||
|
|
||||||
|
builder.addScalarType("DateTime", GraphQLDateTime, {});
|
||||||
|
builder.addScalarType("JSON", GraphQLJSON, {});
|
||||||
|
|
||||||
|
builder.queryType({});
|
||||||
|
builder.mutationType({});
|
||||||
66
packages/backend/src/graphql/context.ts
Normal file
66
packages/backend/src/graphql/context.ts
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
import type { YogaInitialContext } from "graphql-yoga";
|
||||||
|
import type { FastifyRequest, FastifyReply } from "fastify";
|
||||||
|
import type { Context } from "./builder";
|
||||||
|
import { getSession, setSession } from "../lib/auth";
|
||||||
|
import { db } from "../db/connection";
|
||||||
|
import { redis } from "../lib/auth";
|
||||||
|
import { users } from "../db/schema/index";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
|
||||||
|
type ServerContext = {
|
||||||
|
req: FastifyRequest;
|
||||||
|
reply: FastifyReply;
|
||||||
|
db: typeof db;
|
||||||
|
redis: typeof redis;
|
||||||
|
};
|
||||||
|
|
||||||
|
export async function buildContext(ctx: YogaInitialContext & ServerContext): Promise<Context> {
|
||||||
|
const request = ctx.request;
|
||||||
|
const cookieHeader = request.headers.get("cookie") || "";
|
||||||
|
|
||||||
|
// Parse session_token from cookies
|
||||||
|
const cookies = Object.fromEntries(
|
||||||
|
cookieHeader.split(";").map((c) => {
|
||||||
|
const [k, ...v] = c.trim().split("=");
|
||||||
|
return [k.trim(), v.join("=")];
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const token = cookies["session_token"];
|
||||||
|
let currentUser = null;
|
||||||
|
|
||||||
|
if (token) {
|
||||||
|
const session = await getSession(token); // also slides TTL
|
||||||
|
if (session) {
|
||||||
|
const dbInstance = ctx.db || db;
|
||||||
|
const [dbUser] = await dbInstance
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, session.id))
|
||||||
|
.limit(1);
|
||||||
|
if (dbUser) {
|
||||||
|
currentUser = {
|
||||||
|
id: dbUser.id,
|
||||||
|
email: dbUser.email,
|
||||||
|
role: (dbUser.role === "admin" ? "viewer" : dbUser.role) as "model" | "viewer",
|
||||||
|
is_admin: dbUser.is_admin,
|
||||||
|
first_name: dbUser.first_name,
|
||||||
|
last_name: dbUser.last_name,
|
||||||
|
artist_name: dbUser.artist_name,
|
||||||
|
slug: dbUser.slug,
|
||||||
|
avatar: dbUser.avatar,
|
||||||
|
};
|
||||||
|
// Refresh cached session with up-to-date data
|
||||||
|
await setSession(token, currentUser);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
db: ctx.db || db,
|
||||||
|
redis: ctx.redis || redis,
|
||||||
|
currentUser,
|
||||||
|
request,
|
||||||
|
reply: ctx.reply,
|
||||||
|
};
|
||||||
|
}
|
||||||
14
packages/backend/src/graphql/index.ts
Normal file
14
packages/backend/src/graphql/index.ts
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import "./builder.js";
|
||||||
|
import "./types/index.js";
|
||||||
|
import "./resolvers/auth.js";
|
||||||
|
import "./resolvers/users.js";
|
||||||
|
import "./resolvers/videos.js";
|
||||||
|
import "./resolvers/models.js";
|
||||||
|
import "./resolvers/articles.js";
|
||||||
|
import "./resolvers/recordings.js";
|
||||||
|
import "./resolvers/comments.js";
|
||||||
|
import "./resolvers/gamification.js";
|
||||||
|
import "./resolvers/stats.js";
|
||||||
|
import { builder } from "./builder";
|
||||||
|
|
||||||
|
export const schema = builder.toSchema();
|
||||||
255
packages/backend/src/graphql/resolvers/articles.ts
Normal file
255
packages/backend/src/graphql/resolvers/articles.ts
Normal file
@@ -0,0 +1,255 @@
|
|||||||
|
import { builder } from "../builder";
|
||||||
|
import { ArticleType, ArticleListType, AdminArticleListType } from "../types/index";
|
||||||
|
import { articles, users } from "../../db/schema/index";
|
||||||
|
import { eq, and, lte, desc, asc, ilike, or, count, arrayContains } from "drizzle-orm";
|
||||||
|
import { requireAdmin } from "../../lib/acl";
|
||||||
|
|
||||||
|
async function enrichArticle(db: any, article: any) {
|
||||||
|
let author = null;
|
||||||
|
if (article.author) {
|
||||||
|
const authorUser = await db
|
||||||
|
.select({
|
||||||
|
id: users.id,
|
||||||
|
artist_name: users.artist_name,
|
||||||
|
slug: users.slug,
|
||||||
|
avatar: users.avatar,
|
||||||
|
})
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, article.author))
|
||||||
|
.limit(1);
|
||||||
|
author = authorUser[0] || null;
|
||||||
|
}
|
||||||
|
return { ...article, author };
|
||||||
|
}
|
||||||
|
|
||||||
|
builder.queryField("articles", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ArticleListType,
|
||||||
|
args: {
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
search: t.arg.string(),
|
||||||
|
category: t.arg.string(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
sortBy: t.arg.string(),
|
||||||
|
tag: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const pageSize = args.limit ?? 24;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions: any[] = [lte(articles.publish_date, new Date())];
|
||||||
|
if (args.featured !== null && args.featured !== undefined) {
|
||||||
|
conditions.push(eq(articles.featured, args.featured));
|
||||||
|
}
|
||||||
|
if (args.category) conditions.push(eq(articles.category, args.category));
|
||||||
|
if (args.tag) conditions.push(arrayContains(articles.tags, [args.tag]));
|
||||||
|
if (args.search) {
|
||||||
|
conditions.push(
|
||||||
|
or(
|
||||||
|
ilike(articles.title, `%${args.search}%`),
|
||||||
|
ilike(articles.excerpt, `%${args.search}%`),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
const orderArgs =
|
||||||
|
args.sortBy === "name"
|
||||||
|
? [asc(articles.title)]
|
||||||
|
: args.sortBy === "featured"
|
||||||
|
? [desc(articles.featured), desc(articles.publish_date)]
|
||||||
|
: [desc(articles.publish_date)];
|
||||||
|
|
||||||
|
const where = and(...conditions);
|
||||||
|
const [articleList, totalRows] = await Promise.all([
|
||||||
|
(ctx.db.select().from(articles).where(where) as any)
|
||||||
|
.orderBy(...orderArgs)
|
||||||
|
.limit(pageSize)
|
||||||
|
.offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(articles).where(where),
|
||||||
|
]);
|
||||||
|
const items = await Promise.all(
|
||||||
|
articleList.map((article: any) => enrichArticle(ctx.db, article)),
|
||||||
|
);
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("article", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ArticleType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
slug: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const article = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(articles)
|
||||||
|
.where(and(eq(articles.slug, args.slug), lte(articles.publish_date, new Date())))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!article[0]) return null;
|
||||||
|
return enrichArticle(ctx.db, article[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("adminGetArticle", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ArticleType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const article = await ctx.db.select().from(articles).where(eq(articles.id, args.id)).limit(1);
|
||||||
|
if (!article[0]) return null;
|
||||||
|
return enrichArticle(ctx.db, article[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
// ─── Admin queries & mutations ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
builder.queryField("adminListArticles", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminArticleListType,
|
||||||
|
args: {
|
||||||
|
search: t.arg.string(),
|
||||||
|
category: t.arg.string(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const limit = args.limit ?? 50;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions: any[] = [];
|
||||||
|
if (args.search) {
|
||||||
|
conditions.push(
|
||||||
|
or(
|
||||||
|
ilike(articles.title, `%${args.search}%`),
|
||||||
|
ilike(articles.excerpt, `%${args.search}%`),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
if (args.category) conditions.push(eq(articles.category, args.category));
|
||||||
|
if (args.featured !== null && args.featured !== undefined)
|
||||||
|
conditions.push(eq(articles.featured, args.featured));
|
||||||
|
|
||||||
|
const where = conditions.length > 0 ? and(...conditions) : undefined;
|
||||||
|
const [articleList, totalRows] = await Promise.all([
|
||||||
|
ctx.db
|
||||||
|
.select()
|
||||||
|
.from(articles)
|
||||||
|
.where(where)
|
||||||
|
.orderBy(desc(articles.publish_date))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(articles).where(where),
|
||||||
|
]);
|
||||||
|
const items = await Promise.all(
|
||||||
|
articleList.map((article: any) => enrichArticle(ctx.db, article)),
|
||||||
|
);
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("createArticle", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ArticleType,
|
||||||
|
args: {
|
||||||
|
title: t.arg.string({ required: true }),
|
||||||
|
slug: t.arg.string({ required: true }),
|
||||||
|
excerpt: t.arg.string(),
|
||||||
|
content: t.arg.string(),
|
||||||
|
imageId: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
category: t.arg.string(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
publishDate: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const inserted = await ctx.db
|
||||||
|
.insert(articles)
|
||||||
|
.values({
|
||||||
|
title: args.title,
|
||||||
|
slug: args.slug,
|
||||||
|
excerpt: args.excerpt || null,
|
||||||
|
content: args.content || null,
|
||||||
|
image: args.imageId || null,
|
||||||
|
tags: args.tags || [],
|
||||||
|
category: args.category || null,
|
||||||
|
featured: args.featured ?? false,
|
||||||
|
publish_date: args.publishDate ? new Date(args.publishDate) : new Date(),
|
||||||
|
author: ctx.currentUser!.id,
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
return enrichArticle(ctx.db, inserted[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateArticle", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ArticleType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
title: t.arg.string(),
|
||||||
|
slug: t.arg.string(),
|
||||||
|
excerpt: t.arg.string(),
|
||||||
|
content: t.arg.string(),
|
||||||
|
imageId: t.arg.string(),
|
||||||
|
authorId: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
category: t.arg.string(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
publishDate: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const updates: Record<string, unknown> = { date_updated: new Date() };
|
||||||
|
if (args.title !== undefined && args.title !== null) updates.title = args.title;
|
||||||
|
if (args.slug !== undefined && args.slug !== null) updates.slug = args.slug;
|
||||||
|
if (args.excerpt !== undefined) updates.excerpt = args.excerpt;
|
||||||
|
if (args.content !== undefined) updates.content = args.content;
|
||||||
|
if (args.imageId !== undefined) updates.image = args.imageId;
|
||||||
|
if (args.authorId !== undefined) updates.author = args.authorId;
|
||||||
|
if (args.tags !== undefined && args.tags !== null) updates.tags = args.tags;
|
||||||
|
if (args.category !== undefined) updates.category = args.category;
|
||||||
|
if (args.featured !== undefined && args.featured !== null) updates.featured = args.featured;
|
||||||
|
if (args.publishDate !== undefined && args.publishDate !== null)
|
||||||
|
updates.publish_date = new Date(args.publishDate);
|
||||||
|
|
||||||
|
const updated = await ctx.db
|
||||||
|
.update(articles)
|
||||||
|
.set(updates as any)
|
||||||
|
.where(eq(articles.id, args.id))
|
||||||
|
.returning();
|
||||||
|
if (!updated[0]) return null;
|
||||||
|
return enrichArticle(ctx.db, updated[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("deleteArticle", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db.delete(articles).where(eq(articles.id, args.id));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
236
packages/backend/src/graphql/resolvers/auth.ts
Normal file
236
packages/backend/src/graphql/resolvers/auth.ts
Normal file
@@ -0,0 +1,236 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import { builder } from "../builder";
|
||||||
|
import { CurrentUserType } from "../types/index";
|
||||||
|
import { users } from "../../db/schema/index";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { hash, verify as verifyArgon } from "../../lib/argon";
|
||||||
|
import { setSession, deleteSession } from "../../lib/auth";
|
||||||
|
import { sendVerification, sendPasswordReset } from "../../lib/email";
|
||||||
|
import { slugify } from "../../lib/slugify";
|
||||||
|
import { nanoid } from "nanoid";
|
||||||
|
|
||||||
|
builder.mutationField("login", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: CurrentUserType,
|
||||||
|
args: {
|
||||||
|
email: t.arg.string({ required: true }),
|
||||||
|
password: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.email, args.email.toLowerCase()))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!user[0]) throw new GraphQLError("Invalid credentials");
|
||||||
|
|
||||||
|
const valid = await verifyArgon(user[0].password_hash, args.password);
|
||||||
|
if (!valid) throw new GraphQLError("Invalid credentials");
|
||||||
|
|
||||||
|
const token = nanoid(32);
|
||||||
|
const sessionUser = {
|
||||||
|
id: user[0].id,
|
||||||
|
email: user[0].email,
|
||||||
|
role: (user[0].role === "admin" ? "viewer" : user[0].role) as "model" | "viewer",
|
||||||
|
is_admin: user[0].is_admin,
|
||||||
|
first_name: user[0].first_name,
|
||||||
|
last_name: user[0].last_name,
|
||||||
|
artist_name: user[0].artist_name,
|
||||||
|
slug: user[0].slug,
|
||||||
|
avatar: user[0].avatar,
|
||||||
|
};
|
||||||
|
|
||||||
|
await setSession(token, sessionUser);
|
||||||
|
|
||||||
|
// Set session cookie
|
||||||
|
const isProduction = process.env.NODE_ENV === "production";
|
||||||
|
const cookieValue = `session_token=${token}; HttpOnly; Path=/; SameSite=Strict; Max-Age=86400${isProduction ? "; Secure" : ""}`;
|
||||||
|
(ctx.reply as any).header?.("Set-Cookie", cookieValue);
|
||||||
|
|
||||||
|
// For graphql-yoga response
|
||||||
|
if ((ctx as any).serverResponse) {
|
||||||
|
(ctx as any).serverResponse.setHeader("Set-Cookie", cookieValue);
|
||||||
|
}
|
||||||
|
|
||||||
|
return user[0];
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("logout", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
resolve: async (_root, _args, ctx) => {
|
||||||
|
const cookieHeader = ctx.request.headers.get("cookie") || "";
|
||||||
|
const cookies = Object.fromEntries(
|
||||||
|
cookieHeader.split(";").map((c) => {
|
||||||
|
const [k, ...v] = c.trim().split("=");
|
||||||
|
return [k.trim(), v.join("=")];
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
const token = cookies["session_token"];
|
||||||
|
if (token) {
|
||||||
|
await deleteSession(token);
|
||||||
|
}
|
||||||
|
// Clear cookie
|
||||||
|
const isProduction = process.env.NODE_ENV === "production";
|
||||||
|
const cookieValue = `session_token=; HttpOnly; Path=/; SameSite=Strict; Max-Age=0${isProduction ? "; Secure" : ""}`;
|
||||||
|
(ctx.reply as any).header?.("Set-Cookie", cookieValue);
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("register", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
email: t.arg.string({ required: true }),
|
||||||
|
password: t.arg.string({ required: true }),
|
||||||
|
firstName: t.arg.string({ required: true }),
|
||||||
|
lastName: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select({ id: users.id })
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.email, args.email.toLowerCase()))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (existing.length > 0) throw new GraphQLError("Email already registered");
|
||||||
|
|
||||||
|
const passwordHash = await hash(args.password);
|
||||||
|
const artistName = `${args.firstName} ${args.lastName}`;
|
||||||
|
const baseSlug = slugify(artistName);
|
||||||
|
const verifyToken = nanoid(32);
|
||||||
|
|
||||||
|
// Ensure unique slug
|
||||||
|
let slug = baseSlug;
|
||||||
|
let attempt = 0;
|
||||||
|
while (true) {
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select({ id: users.id })
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.slug, slug))
|
||||||
|
.limit(1);
|
||||||
|
if (existing.length === 0) break;
|
||||||
|
attempt++;
|
||||||
|
slug = `${baseSlug}-${attempt}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
await ctx.db.insert(users).values({
|
||||||
|
email: args.email.toLowerCase(),
|
||||||
|
password_hash: passwordHash,
|
||||||
|
first_name: args.firstName,
|
||||||
|
last_name: args.lastName,
|
||||||
|
artist_name: artistName,
|
||||||
|
slug,
|
||||||
|
role: "viewer",
|
||||||
|
email_verify_token: verifyToken,
|
||||||
|
email_verified: false,
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
await sendVerification(args.email, verifyToken);
|
||||||
|
} catch (e) {
|
||||||
|
console.warn("Failed to send verification email:", (e as Error).message);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("verifyEmail", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
token: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.email_verify_token, args.token))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!user[0]) throw new GraphQLError("Invalid verification token");
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(users)
|
||||||
|
.set({ email_verified: true, email_verify_token: null })
|
||||||
|
.where(eq(users.id, user[0].id));
|
||||||
|
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("requestPasswordReset", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
email: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.email, args.email.toLowerCase()))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
// Always return true to prevent email enumeration
|
||||||
|
if (!user[0]) return true;
|
||||||
|
|
||||||
|
const token = nanoid(32);
|
||||||
|
const expiry = new Date(Date.now() + 60 * 60 * 1000); // 1 hour
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(users)
|
||||||
|
.set({ password_reset_token: token, password_reset_expiry: expiry })
|
||||||
|
.where(eq(users.id, user[0].id));
|
||||||
|
|
||||||
|
try {
|
||||||
|
await sendPasswordReset(args.email, token);
|
||||||
|
} catch (e) {
|
||||||
|
console.warn("Failed to send password reset email:", (e as Error).message);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("resetPassword", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
token: t.arg.string({ required: true }),
|
||||||
|
newPassword: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.password_reset_token, args.token))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!user[0]) throw new GraphQLError("Invalid or expired reset token");
|
||||||
|
if (user[0].password_reset_expiry && user[0].password_reset_expiry < new Date()) {
|
||||||
|
throw new GraphQLError("Reset token expired");
|
||||||
|
}
|
||||||
|
|
||||||
|
const passwordHash = await hash(args.newPassword);
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(users)
|
||||||
|
.set({
|
||||||
|
password_hash: passwordHash,
|
||||||
|
password_reset_token: null,
|
||||||
|
password_reset_expiry: null,
|
||||||
|
})
|
||||||
|
.where(eq(users.id, user[0].id));
|
||||||
|
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
147
packages/backend/src/graphql/resolvers/comments.ts
Normal file
147
packages/backend/src/graphql/resolvers/comments.ts
Normal file
@@ -0,0 +1,147 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import { builder } from "../builder";
|
||||||
|
import { CommentType, AdminCommentListType } from "../types/index";
|
||||||
|
import { comments, users } from "../../db/schema/index";
|
||||||
|
import { eq, and, desc, ilike, or, count } from "drizzle-orm";
|
||||||
|
import { awardPoints, checkAchievements } from "../../lib/gamification";
|
||||||
|
import { requireOwnerOrAdmin, requireAdmin } from "../../lib/acl";
|
||||||
|
|
||||||
|
builder.queryField("commentsForVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: [CommentType],
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const commentList = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(comments)
|
||||||
|
.where(and(eq(comments.collection, "videos"), eq(comments.item_id, args.videoId)))
|
||||||
|
.orderBy(desc(comments.date_created));
|
||||||
|
|
||||||
|
return Promise.all(
|
||||||
|
commentList.map(async (c: any) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select({
|
||||||
|
id: users.id,
|
||||||
|
first_name: users.first_name,
|
||||||
|
last_name: users.last_name,
|
||||||
|
artist_name: users.artist_name,
|
||||||
|
avatar: users.avatar,
|
||||||
|
})
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, c.user_id))
|
||||||
|
.limit(1);
|
||||||
|
return { ...c, user: user[0] || null };
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("createCommentForVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: CommentType,
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
comment: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const newComment = await ctx.db
|
||||||
|
.insert(comments)
|
||||||
|
.values({
|
||||||
|
collection: "videos",
|
||||||
|
item_id: args.videoId,
|
||||||
|
comment: args.comment,
|
||||||
|
user_id: ctx.currentUser.id,
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
// Gamification (non-blocking)
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "COMMENT_CREATE")
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "social"))
|
||||||
|
.catch((e) => console.error("Gamification error on comment:", e));
|
||||||
|
|
||||||
|
const user = await ctx.db
|
||||||
|
.select({
|
||||||
|
id: users.id,
|
||||||
|
first_name: users.first_name,
|
||||||
|
last_name: users.last_name,
|
||||||
|
artist_name: users.artist_name,
|
||||||
|
avatar: users.avatar,
|
||||||
|
})
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, ctx.currentUser.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
return { ...newComment[0], user: user[0] || null };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("deleteComment", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
id: t.arg.int({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const comment = await ctx.db.select().from(comments).where(eq(comments.id, args.id)).limit(1);
|
||||||
|
if (!comment[0]) throw new GraphQLError("Comment not found");
|
||||||
|
requireOwnerOrAdmin(ctx, comment[0].user_id);
|
||||||
|
await ctx.db.delete(comments).where(eq(comments.id, args.id));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("adminListComments", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminCommentListType,
|
||||||
|
args: {
|
||||||
|
search: t.arg.string(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const limit = args.limit ?? 50;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions = args.search ? [ilike(comments.comment, `%${args.search}%`)] : [];
|
||||||
|
const where = conditions.length > 0 ? and(...conditions) : undefined;
|
||||||
|
|
||||||
|
const [commentList, totalRows] = await Promise.all([
|
||||||
|
ctx.db
|
||||||
|
.select()
|
||||||
|
.from(comments)
|
||||||
|
.where(where)
|
||||||
|
.orderBy(desc(comments.date_created))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(comments).where(where),
|
||||||
|
]);
|
||||||
|
|
||||||
|
const items = await Promise.all(
|
||||||
|
commentList.map(async (c: any) => {
|
||||||
|
const user = await ctx.db
|
||||||
|
.select({
|
||||||
|
id: users.id,
|
||||||
|
first_name: users.first_name,
|
||||||
|
last_name: users.last_name,
|
||||||
|
artist_name: users.artist_name,
|
||||||
|
avatar: users.avatar,
|
||||||
|
})
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, c.user_id))
|
||||||
|
.limit(1);
|
||||||
|
return { ...c, user: user[0] || null };
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
125
packages/backend/src/graphql/resolvers/gamification.ts
Normal file
125
packages/backend/src/graphql/resolvers/gamification.ts
Normal file
@@ -0,0 +1,125 @@
|
|||||||
|
import { builder } from "../builder";
|
||||||
|
import { LeaderboardEntryType, UserGamificationType, AchievementType } from "../types/index";
|
||||||
|
import {
|
||||||
|
user_stats,
|
||||||
|
users,
|
||||||
|
user_achievements,
|
||||||
|
achievements,
|
||||||
|
user_points,
|
||||||
|
} from "../../db/schema/index";
|
||||||
|
import { eq, desc, gt, count, isNotNull, and } from "drizzle-orm";
|
||||||
|
|
||||||
|
builder.queryField("leaderboard", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: [LeaderboardEntryType],
|
||||||
|
args: {
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const limit = Math.min(args.limit || 100, 500);
|
||||||
|
const offset = args.offset || 0;
|
||||||
|
|
||||||
|
const entries = await ctx.db
|
||||||
|
.select({
|
||||||
|
user_id: user_stats.user_id,
|
||||||
|
display_name: users.artist_name,
|
||||||
|
avatar: users.avatar,
|
||||||
|
total_weighted_points: user_stats.total_weighted_points,
|
||||||
|
total_raw_points: user_stats.total_raw_points,
|
||||||
|
recordings_count: user_stats.recordings_count,
|
||||||
|
playbacks_count: user_stats.playbacks_count,
|
||||||
|
achievements_count: user_stats.achievements_count,
|
||||||
|
})
|
||||||
|
.from(user_stats)
|
||||||
|
.leftJoin(users, eq(user_stats.user_id, users.id))
|
||||||
|
.orderBy(desc(user_stats.total_weighted_points))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset);
|
||||||
|
|
||||||
|
return entries.map((e: any, i: number) => ({ ...e, rank: offset + i + 1 }));
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("userGamification", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: UserGamificationType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const stats = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(user_stats)
|
||||||
|
.where(eq(user_stats.user_id, args.userId))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
let rank = 1;
|
||||||
|
if (stats[0]) {
|
||||||
|
const rankResult = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(user_stats)
|
||||||
|
.where(gt(user_stats.total_weighted_points, stats[0].total_weighted_points || 0));
|
||||||
|
rank = (rankResult[0]?.count || 0) + 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
const userAchievements = await ctx.db
|
||||||
|
.select({
|
||||||
|
id: achievements.id,
|
||||||
|
code: achievements.code,
|
||||||
|
name: achievements.name,
|
||||||
|
description: achievements.description,
|
||||||
|
icon: achievements.icon,
|
||||||
|
category: achievements.category,
|
||||||
|
date_unlocked: user_achievements.date_unlocked,
|
||||||
|
progress: user_achievements.progress,
|
||||||
|
required_count: achievements.required_count,
|
||||||
|
})
|
||||||
|
.from(user_achievements)
|
||||||
|
.leftJoin(achievements, eq(user_achievements.achievement_id, achievements.id))
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(user_achievements.user_id, args.userId),
|
||||||
|
isNotNull(user_achievements.date_unlocked),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.orderBy(desc(user_achievements.date_unlocked));
|
||||||
|
|
||||||
|
const recentPoints = await ctx.db
|
||||||
|
.select({
|
||||||
|
action: user_points.action,
|
||||||
|
points: user_points.points,
|
||||||
|
date_created: user_points.date_created,
|
||||||
|
recording_id: user_points.recording_id,
|
||||||
|
})
|
||||||
|
.from(user_points)
|
||||||
|
.where(eq(user_points.user_id, args.userId))
|
||||||
|
.orderBy(desc(user_points.date_created))
|
||||||
|
.limit(10);
|
||||||
|
|
||||||
|
return {
|
||||||
|
stats: stats[0] ? { ...stats[0], rank } : null,
|
||||||
|
achievements: userAchievements.map((a: any) => ({
|
||||||
|
...a,
|
||||||
|
date_unlocked: a.date_unlocked!,
|
||||||
|
})),
|
||||||
|
recent_points: recentPoints,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("achievements", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: [AchievementType],
|
||||||
|
resolve: async (_root, _args, ctx) => {
|
||||||
|
return ctx.db
|
||||||
|
.select()
|
||||||
|
.from(achievements)
|
||||||
|
.where(eq(achievements.status, "published"))
|
||||||
|
.orderBy(achievements.sort);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
73
packages/backend/src/graphql/resolvers/models.ts
Normal file
73
packages/backend/src/graphql/resolvers/models.ts
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
import { builder } from "../builder";
|
||||||
|
import { ModelType, ModelListType } from "../types/index";
|
||||||
|
import { users, user_photos, files } from "../../db/schema/index";
|
||||||
|
import { eq, and, desc, asc, ilike, count, arrayContains } from "drizzle-orm";
|
||||||
|
|
||||||
|
async function enrichModel(db: any, user: any) {
|
||||||
|
// Fetch photos
|
||||||
|
const photoRows = await db
|
||||||
|
.select({ id: files.id, filename: files.filename })
|
||||||
|
.from(user_photos)
|
||||||
|
.leftJoin(files, eq(user_photos.file_id, files.id))
|
||||||
|
.where(eq(user_photos.user_id, user.id))
|
||||||
|
.orderBy(user_photos.sort);
|
||||||
|
|
||||||
|
const seen = new Set<string>();
|
||||||
|
const photos = photoRows
|
||||||
|
.filter((p: any) => p.id && !seen.has(p.id) && seen.add(p.id))
|
||||||
|
.map((p: any) => ({ id: p.id, filename: p.filename }));
|
||||||
|
|
||||||
|
return { ...user, photos };
|
||||||
|
}
|
||||||
|
|
||||||
|
builder.queryField("models", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ModelListType,
|
||||||
|
args: {
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
search: t.arg.string(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
sortBy: t.arg.string(),
|
||||||
|
tag: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const pageSize = args.limit ?? 24;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions: any[] = [eq(users.role, "model")];
|
||||||
|
if (args.search) conditions.push(ilike(users.artist_name, `%${args.search}%`));
|
||||||
|
if (args.tag) conditions.push(arrayContains(users.tags, [args.tag]));
|
||||||
|
|
||||||
|
const order = args.sortBy === "recent" ? desc(users.date_created) : asc(users.artist_name);
|
||||||
|
|
||||||
|
const where = and(...conditions);
|
||||||
|
const [modelList, totalRows] = await Promise.all([
|
||||||
|
ctx.db.select().from(users).where(where).orderBy(order).limit(pageSize).offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(users).where(where),
|
||||||
|
]);
|
||||||
|
const items = await Promise.all(modelList.map((m: any) => enrichModel(ctx.db, m)));
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("model", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: ModelType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
slug: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const model = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(and(eq(users.slug, args.slug), eq(users.role, "model")))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!model[0]) return null;
|
||||||
|
return enrichModel(ctx.db, model[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
391
packages/backend/src/graphql/resolvers/recordings.ts
Normal file
391
packages/backend/src/graphql/resolvers/recordings.ts
Normal file
@@ -0,0 +1,391 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import { builder } from "../builder";
|
||||||
|
import { RecordingType, AdminRecordingListType } from "../types/index";
|
||||||
|
import { recordings, recording_plays, users } from "../../db/schema/index";
|
||||||
|
import { eq, and, desc, ilike, count } from "drizzle-orm";
|
||||||
|
import { slugify } from "../../lib/slugify";
|
||||||
|
import { awardPoints, checkAchievements } from "../../lib/gamification";
|
||||||
|
import { requireAdmin } from "../../lib/acl";
|
||||||
|
|
||||||
|
builder.queryField("recordings", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: [RecordingType],
|
||||||
|
args: {
|
||||||
|
status: t.arg.string(),
|
||||||
|
tags: t.arg.string(),
|
||||||
|
linkedVideoId: t.arg.string(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
page: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const conditions = [eq(recordings.user_id, ctx.currentUser.id)];
|
||||||
|
if (args.status) conditions.push(eq(recordings.status, args.status as any));
|
||||||
|
if (args.linkedVideoId) conditions.push(eq(recordings.linked_video, args.linkedVideoId));
|
||||||
|
|
||||||
|
const limit = args.limit || 50;
|
||||||
|
const page = args.page || 1;
|
||||||
|
const offset = (page - 1) * limit;
|
||||||
|
|
||||||
|
return ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(and(...conditions))
|
||||||
|
.orderBy(desc(recordings.date_created))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("recording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: RecordingType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const recording = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(eq(recordings.id, args.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!recording[0]) return null;
|
||||||
|
|
||||||
|
if (recording[0].user_id !== ctx.currentUser.id && !recording[0].public) {
|
||||||
|
throw new GraphQLError("Forbidden");
|
||||||
|
}
|
||||||
|
|
||||||
|
return recording[0];
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("communityRecordings", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: [RecordingType],
|
||||||
|
args: {
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
return ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(and(eq(recordings.status, "published"), eq(recordings.public, true)))
|
||||||
|
.orderBy(desc(recordings.date_created))
|
||||||
|
.limit(args.limit || 50)
|
||||||
|
.offset(args.offset || 0);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("createRecording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: RecordingType,
|
||||||
|
args: {
|
||||||
|
title: t.arg.string({ required: true }),
|
||||||
|
description: t.arg.string(),
|
||||||
|
duration: t.arg.int({ required: true }),
|
||||||
|
events: t.arg({ type: "JSON", required: true }),
|
||||||
|
deviceInfo: t.arg({ type: "JSON", required: true }),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
status: t.arg.string(),
|
||||||
|
linkedVideoId: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const slug = slugify(args.title);
|
||||||
|
|
||||||
|
const newRecording = await ctx.db
|
||||||
|
.insert(recordings)
|
||||||
|
.values({
|
||||||
|
title: args.title,
|
||||||
|
description: args.description || null,
|
||||||
|
slug,
|
||||||
|
duration: args.duration,
|
||||||
|
events: (args.events as object[]) || [],
|
||||||
|
device_info: (args.deviceInfo as object[]) || [],
|
||||||
|
user_id: ctx.currentUser.id,
|
||||||
|
tags: args.tags || [],
|
||||||
|
linked_video: args.linkedVideoId || null,
|
||||||
|
status: (args.status as any) || "draft",
|
||||||
|
public: false,
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
const recording = newRecording[0];
|
||||||
|
|
||||||
|
// Gamification (non-blocking)
|
||||||
|
if (recording.status === "published") {
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_CREATE", recording.id)
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "recordings"))
|
||||||
|
.catch((e) => console.error("Gamification error on recording create:", e));
|
||||||
|
}
|
||||||
|
|
||||||
|
return recording;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateRecording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: RecordingType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
title: t.arg.string(),
|
||||||
|
description: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
status: t.arg.string(),
|
||||||
|
public: t.arg.boolean(),
|
||||||
|
linkedVideoId: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(eq(recordings.id, args.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!existing[0]) throw new GraphQLError("Recording not found");
|
||||||
|
if (existing[0].user_id !== ctx.currentUser.id) throw new GraphQLError("Forbidden");
|
||||||
|
|
||||||
|
const updates: Record<string, unknown> = { date_updated: new Date() };
|
||||||
|
if (args.title !== null && args.title !== undefined) {
|
||||||
|
updates.title = args.title;
|
||||||
|
updates.slug = slugify(args.title);
|
||||||
|
}
|
||||||
|
if (args.description !== null && args.description !== undefined)
|
||||||
|
updates.description = args.description;
|
||||||
|
if (args.tags !== null && args.tags !== undefined) updates.tags = args.tags;
|
||||||
|
if (args.status !== null && args.status !== undefined) updates.status = args.status;
|
||||||
|
if (args.public !== null && args.public !== undefined) updates.public = args.public;
|
||||||
|
if (args.linkedVideoId !== null && args.linkedVideoId !== undefined)
|
||||||
|
updates.linked_video = args.linkedVideoId;
|
||||||
|
|
||||||
|
const updated = await ctx.db
|
||||||
|
.update(recordings)
|
||||||
|
.set(updates as any)
|
||||||
|
.where(eq(recordings.id, args.id))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
const recording = updated[0];
|
||||||
|
|
||||||
|
// Gamification (non-blocking)
|
||||||
|
if (args.status === "published" && existing[0].status !== "published") {
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_CREATE", recording.id)
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "recordings"))
|
||||||
|
.catch((e) => console.error("Gamification error on recording publish:", e));
|
||||||
|
} else if (args.status === "published" && recording.featured && !existing[0].featured) {
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_FEATURED", recording.id)
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "recordings"))
|
||||||
|
.catch((e) => console.error("Gamification error on recording feature:", e));
|
||||||
|
}
|
||||||
|
|
||||||
|
return recording;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("deleteRecording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(eq(recordings.id, args.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!existing[0]) throw new GraphQLError("Recording not found");
|
||||||
|
if (existing[0].user_id !== ctx.currentUser.id) throw new GraphQLError("Forbidden");
|
||||||
|
|
||||||
|
await ctx.db.delete(recordings).where(eq(recordings.id, args.id));
|
||||||
|
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("duplicateRecording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: RecordingType,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const original = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(eq(recordings.id, args.id))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!original[0]) throw new GraphQLError("Recording not found");
|
||||||
|
if (original[0].status !== "published" || !original[0].public) {
|
||||||
|
throw new GraphQLError("Recording is not publicly shared");
|
||||||
|
}
|
||||||
|
|
||||||
|
const slug = `${slugify(original[0].title)}-copy-${Date.now()}`;
|
||||||
|
|
||||||
|
const duplicated = await ctx.db
|
||||||
|
.insert(recordings)
|
||||||
|
.values({
|
||||||
|
title: `${original[0].title} (Copy)`,
|
||||||
|
description: original[0].description,
|
||||||
|
slug,
|
||||||
|
duration: original[0].duration,
|
||||||
|
events: original[0].events || [],
|
||||||
|
device_info: original[0].device_info || [],
|
||||||
|
user_id: ctx.currentUser.id,
|
||||||
|
tags: original[0].tags || [],
|
||||||
|
status: "draft",
|
||||||
|
public: false,
|
||||||
|
original_recording_id: original[0].id,
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return duplicated[0];
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("recordRecordingPlay", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "JSON",
|
||||||
|
args: {
|
||||||
|
recordingId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const recording = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(eq(recordings.id, args.recordingId))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!recording[0]) throw new GraphQLError("Recording not found");
|
||||||
|
|
||||||
|
const play = await ctx.db
|
||||||
|
.insert(recording_plays)
|
||||||
|
.values({
|
||||||
|
recording_id: args.recordingId,
|
||||||
|
user_id: ctx.currentUser?.id || null,
|
||||||
|
duration_played: 0,
|
||||||
|
completed: false,
|
||||||
|
})
|
||||||
|
.returning({ id: recording_plays.id });
|
||||||
|
|
||||||
|
// Gamification (non-blocking)
|
||||||
|
if (ctx.currentUser && recording[0].user_id !== ctx.currentUser.id) {
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_PLAY", args.recordingId)
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "playback"))
|
||||||
|
.catch((e) => console.error("Gamification error on recording play:", e));
|
||||||
|
}
|
||||||
|
|
||||||
|
return { success: true, play_id: play[0].id };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateRecordingPlay", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
playId: t.arg.string({ required: true }),
|
||||||
|
durationPlayed: t.arg.int({ required: true }),
|
||||||
|
completed: t.arg.boolean({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recording_plays)
|
||||||
|
.where(eq(recording_plays.id, args.playId))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!existing[0]) throw new GraphQLError("Play record not found");
|
||||||
|
const wasCompleted = existing[0].completed;
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(recording_plays)
|
||||||
|
.set({
|
||||||
|
duration_played: args.durationPlayed,
|
||||||
|
completed: args.completed,
|
||||||
|
date_updated: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(recording_plays.id, args.playId));
|
||||||
|
|
||||||
|
// Gamification (non-blocking)
|
||||||
|
if (args.completed && !wasCompleted && ctx.currentUser) {
|
||||||
|
awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_COMPLETE", existing[0].recording_id)
|
||||||
|
.then(() => checkAchievements(ctx.db, ctx.currentUser!.id, "playback"))
|
||||||
|
.catch((e) => console.error("Gamification error on recording complete:", e));
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("adminListRecordings", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminRecordingListType,
|
||||||
|
args: {
|
||||||
|
search: t.arg.string(),
|
||||||
|
status: t.arg.string(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const limit = args.limit ?? 50;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions: any[] = [];
|
||||||
|
if (args.search) conditions.push(ilike(recordings.title, `%${args.search}%`));
|
||||||
|
if (args.status) conditions.push(eq(recordings.status, args.status as any));
|
||||||
|
const where = conditions.length > 0 ? and(...conditions) : undefined;
|
||||||
|
|
||||||
|
const [rows, totalRows] = await Promise.all([
|
||||||
|
ctx.db
|
||||||
|
.select()
|
||||||
|
.from(recordings)
|
||||||
|
.where(where)
|
||||||
|
.orderBy(desc(recordings.date_created))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(recordings).where(where),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return { items: rows, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("adminDeleteRecording", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db.delete(recordings).where(eq(recordings.id, args.id));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
27
packages/backend/src/graphql/resolvers/stats.ts
Normal file
27
packages/backend/src/graphql/resolvers/stats.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import { builder } from "../builder";
|
||||||
|
import { StatsType } from "../types/index";
|
||||||
|
import { users, videos } from "../../db/schema/index";
|
||||||
|
import { eq, count } from "drizzle-orm";
|
||||||
|
|
||||||
|
builder.queryField("stats", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: StatsType,
|
||||||
|
resolve: async (_root, _args, ctx) => {
|
||||||
|
const modelsCount = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.role, "model"));
|
||||||
|
const viewersCount = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.role, "viewer"));
|
||||||
|
const videosCount = await ctx.db.select({ count: count() }).from(videos);
|
||||||
|
|
||||||
|
return {
|
||||||
|
models_count: modelsCount[0]?.count || 0,
|
||||||
|
viewers_count: viewersCount[0]?.count || 0,
|
||||||
|
videos_count: videosCount[0]?.count || 0,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
232
packages/backend/src/graphql/resolvers/users.ts
Normal file
232
packages/backend/src/graphql/resolvers/users.ts
Normal file
@@ -0,0 +1,232 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import { builder } from "../builder";
|
||||||
|
import { CurrentUserType, UserType, AdminUserListType, AdminUserDetailType } from "../types/index";
|
||||||
|
import { users, user_photos, files } from "../../db/schema/index";
|
||||||
|
import { eq, ilike, or, count, and } from "drizzle-orm";
|
||||||
|
import { requireAdmin } from "../../lib/acl";
|
||||||
|
|
||||||
|
builder.queryField("me", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: CurrentUserType,
|
||||||
|
nullable: true,
|
||||||
|
resolve: async (_root, _args, ctx) => {
|
||||||
|
if (!ctx.currentUser) return null;
|
||||||
|
const user = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, ctx.currentUser.id))
|
||||||
|
.limit(1);
|
||||||
|
return user[0] || null;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("userProfile", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: UserType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const user = await ctx.db.select().from(users).where(eq(users.id, args.id)).limit(1);
|
||||||
|
return user[0] || null;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateProfile", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: CurrentUserType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
firstName: t.arg.string(),
|
||||||
|
lastName: t.arg.string(),
|
||||||
|
artistName: t.arg.string(),
|
||||||
|
description: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const updates: Record<string, unknown> = { date_updated: new Date() };
|
||||||
|
if (args.firstName !== undefined && args.firstName !== null)
|
||||||
|
updates.first_name = args.firstName;
|
||||||
|
if (args.lastName !== undefined && args.lastName !== null) updates.last_name = args.lastName;
|
||||||
|
if (args.artistName !== undefined && args.artistName !== null)
|
||||||
|
updates.artist_name = args.artistName;
|
||||||
|
if (args.description !== undefined && args.description !== null)
|
||||||
|
updates.description = args.description;
|
||||||
|
if (args.tags !== undefined && args.tags !== null) updates.tags = args.tags;
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(users)
|
||||||
|
.set(updates as any)
|
||||||
|
.where(eq(users.id, ctx.currentUser.id));
|
||||||
|
|
||||||
|
const updated = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(users)
|
||||||
|
.where(eq(users.id, ctx.currentUser.id))
|
||||||
|
.limit(1);
|
||||||
|
return updated[0] || null;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
// ─── Admin queries & mutations ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
builder.queryField("adminListUsers", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminUserListType,
|
||||||
|
args: {
|
||||||
|
role: t.arg.string(),
|
||||||
|
search: t.arg.string(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
|
||||||
|
const limit = args.limit ?? 50;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
let query = ctx.db.select().from(users);
|
||||||
|
let countQuery = ctx.db.select({ total: count() }).from(users);
|
||||||
|
|
||||||
|
const conditions: any[] = [];
|
||||||
|
if (args.role) {
|
||||||
|
conditions.push(eq(users.role, args.role as any));
|
||||||
|
}
|
||||||
|
if (args.search) {
|
||||||
|
const pattern = `%${args.search}%`;
|
||||||
|
conditions.push(or(ilike(users.email, pattern), ilike(users.artist_name, pattern)));
|
||||||
|
}
|
||||||
|
|
||||||
|
if (conditions.length > 0) {
|
||||||
|
const where = conditions.length === 1 ? conditions[0] : and(...conditions);
|
||||||
|
query = (query as any).where(where);
|
||||||
|
countQuery = (countQuery as any).where(where);
|
||||||
|
}
|
||||||
|
|
||||||
|
const [items, totalRows] = await Promise.all([
|
||||||
|
(query as any).limit(limit).offset(offset),
|
||||||
|
countQuery,
|
||||||
|
]);
|
||||||
|
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("adminUpdateUser", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: UserType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
role: t.arg.string(),
|
||||||
|
isAdmin: t.arg.boolean(),
|
||||||
|
firstName: t.arg.string(),
|
||||||
|
lastName: t.arg.string(),
|
||||||
|
artistName: t.arg.string(),
|
||||||
|
avatarId: t.arg.string(),
|
||||||
|
bannerId: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
|
||||||
|
const updates: Record<string, unknown> = { date_updated: new Date() };
|
||||||
|
if (args.role !== undefined && args.role !== null) updates.role = args.role as any;
|
||||||
|
if (args.isAdmin !== undefined && args.isAdmin !== null) updates.is_admin = args.isAdmin;
|
||||||
|
if (args.firstName !== undefined && args.firstName !== null)
|
||||||
|
updates.first_name = args.firstName;
|
||||||
|
if (args.lastName !== undefined && args.lastName !== null) updates.last_name = args.lastName;
|
||||||
|
if (args.artistName !== undefined && args.artistName !== null)
|
||||||
|
updates.artist_name = args.artistName;
|
||||||
|
if (args.avatarId !== undefined && args.avatarId !== null) updates.avatar = args.avatarId;
|
||||||
|
if (args.bannerId !== undefined && args.bannerId !== null) updates.banner = args.bannerId;
|
||||||
|
|
||||||
|
const updated = await ctx.db
|
||||||
|
.update(users)
|
||||||
|
.set(updates as any)
|
||||||
|
.where(eq(users.id, args.userId))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return updated[0] || null;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("adminDeleteUser", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
if (args.userId === ctx.currentUser!.id) throw new GraphQLError("Cannot delete yourself");
|
||||||
|
await ctx.db.delete(users).where(eq(users.id, args.userId));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("adminGetUser", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminUserDetailType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const user = await ctx.db.select().from(users).where(eq(users.id, args.userId)).limit(1);
|
||||||
|
if (!user[0]) return null;
|
||||||
|
const photoRows = await ctx.db
|
||||||
|
.select({ id: files.id, filename: files.filename })
|
||||||
|
.from(user_photos)
|
||||||
|
.leftJoin(files, eq(user_photos.file_id, files.id))
|
||||||
|
.where(eq(user_photos.user_id, args.userId))
|
||||||
|
.orderBy(user_photos.sort);
|
||||||
|
const seen = new Set<string>();
|
||||||
|
const photos = photoRows
|
||||||
|
.filter((p: any) => p.id && !seen.has(p.id) && seen.add(p.id))
|
||||||
|
.map((p: any) => ({ id: p.id, filename: p.filename }));
|
||||||
|
return { ...user[0], photos };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("adminAddUserPhoto", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
fileId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db.insert(user_photos).values({ user_id: args.userId, file_id: args.fileId });
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("adminRemoveUserPhoto", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
userId: t.arg.string({ required: true }),
|
||||||
|
fileId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db
|
||||||
|
.delete(user_photos)
|
||||||
|
.where(and(eq(user_photos.user_id, args.userId), eq(user_photos.file_id, args.fileId)));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
637
packages/backend/src/graphql/resolvers/videos.ts
Normal file
637
packages/backend/src/graphql/resolvers/videos.ts
Normal file
@@ -0,0 +1,637 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import { builder } from "../builder";
|
||||||
|
import {
|
||||||
|
VideoType,
|
||||||
|
VideoListType,
|
||||||
|
AdminVideoListType,
|
||||||
|
VideoLikeResponseType,
|
||||||
|
VideoPlayResponseType,
|
||||||
|
VideoLikeStatusType,
|
||||||
|
} from "../types/index";
|
||||||
|
import {
|
||||||
|
videos,
|
||||||
|
video_models,
|
||||||
|
video_likes,
|
||||||
|
video_plays,
|
||||||
|
users,
|
||||||
|
files,
|
||||||
|
} from "../../db/schema/index";
|
||||||
|
import {
|
||||||
|
eq,
|
||||||
|
and,
|
||||||
|
lte,
|
||||||
|
desc,
|
||||||
|
asc,
|
||||||
|
inArray,
|
||||||
|
count,
|
||||||
|
ilike,
|
||||||
|
lt,
|
||||||
|
gte,
|
||||||
|
arrayContains,
|
||||||
|
} from "drizzle-orm";
|
||||||
|
import { requireAdmin } from "../../lib/acl";
|
||||||
|
|
||||||
|
async function enrichVideo(db: any, video: any) {
|
||||||
|
// Fetch models
|
||||||
|
const modelRows = await db
|
||||||
|
.select({
|
||||||
|
id: users.id,
|
||||||
|
artist_name: users.artist_name,
|
||||||
|
slug: users.slug,
|
||||||
|
avatar: users.avatar,
|
||||||
|
})
|
||||||
|
.from(video_models)
|
||||||
|
.leftJoin(users, eq(video_models.user_id, users.id))
|
||||||
|
.where(eq(video_models.video_id, video.id));
|
||||||
|
|
||||||
|
// Fetch movie file
|
||||||
|
let movieFile = null;
|
||||||
|
if (video.movie) {
|
||||||
|
const mf = await db.select().from(files).where(eq(files.id, video.movie)).limit(1);
|
||||||
|
movieFile = mf[0] || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Count likes
|
||||||
|
const likesCount = await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(video_likes)
|
||||||
|
.where(eq(video_likes.video_id, video.id));
|
||||||
|
const playsCount = await db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(video_plays)
|
||||||
|
.where(eq(video_plays.video_id, video.id));
|
||||||
|
|
||||||
|
return {
|
||||||
|
...video,
|
||||||
|
models: modelRows,
|
||||||
|
movie_file: movieFile,
|
||||||
|
likes_count: likesCount[0]?.count || 0,
|
||||||
|
plays_count: playsCount[0]?.count || 0,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Public video listing with optional filters (model, featured, tag, title
// search, duration bucket) and sorting. Anonymous viewers only see
// non-premium, already-published videos. Returns { items, total }.
builder.queryField("videos", (t) =>
  t.field({
    type: VideoListType,
    args: {
      modelId: t.arg.string(),
      featured: t.arg.boolean(),
      limit: t.arg.int(),
      search: t.arg.string(),
      offset: t.arg.int(),
      sortBy: t.arg.string(),
      duration: t.arg.string(),
      tag: t.arg.string(),
    },
    resolve: async (_root, args, ctx) => {
      const pageSize = args.limit ?? 24;
      const offset = args.offset ?? 0;

      // Base visibility: only videos whose upload_date has passed.
      const conditions: any[] = [lte(videos.upload_date, new Date())];
      // Premium content is hidden from anonymous viewers.
      if (!ctx.currentUser) conditions.push(eq(videos.premium, false));
      if (args.featured !== null && args.featured !== undefined) {
        conditions.push(eq(videos.featured, args.featured));
      }
      if (args.search) {
        conditions.push(ilike(videos.title, `%${args.search}%`));
      }
      if (args.tag) {
        conditions.push(arrayContains(videos.tags, [args.tag]));
      }
      if (args.modelId) {
        // Resolve the model filter through the video_models join table;
        // short-circuit with an empty page when the model has no videos.
        const videoIds = await ctx.db
          .select({ video_id: video_models.video_id })
          .from(video_models)
          .where(eq(video_models.user_id, args.modelId));
        if (videoIds.length === 0) return { items: [], total: 0 };
        conditions.push(
          inArray(
            videos.id,
            videoIds.map((v: any) => v.video_id),
          ),
        );
      }

      // Sort order; defaults to newest first.
      const order =
        args.sortBy === "most_liked"
          ? desc(videos.likes_count)
          : args.sortBy === "most_played"
            ? desc(videos.plays_count)
            : args.sortBy === "name"
              ? asc(videos.title)
              : desc(videos.upload_date);

      const where = and(...conditions);

      // Duration filter requires JOIN to files table (duration lives on the
      // movie file, not the video row). Buckets: short < 600s, medium
      // 600-1199s, long >= 1200s.
      if (args.duration && args.duration !== "all") {
        const durationCond =
          args.duration === "short"
            ? lt(files.duration, 600)
            : args.duration === "medium"
              ? and(gte(files.duration, 600), lt(files.duration, 1200))
              : gte(files.duration, 1200);

        const fullWhere = and(where, durationCond);
        const [rows, totalRows] = await Promise.all([
          ctx.db
            .select({ v: videos })
            .from(videos)
            .leftJoin(files, eq(videos.movie, files.id))
            .where(fullWhere)
            .orderBy(order)
            .limit(pageSize)
            .offset(offset),
          ctx.db
            .select({ total: count() })
            .from(videos)
            .leftJoin(files, eq(videos.movie, files.id))
            .where(fullWhere),
        ]);
        // Joined rows come back as { v: <video row> }; unwrap defensively.
        const videoList = rows.map((r: any) => r.v || r);
        const items = await Promise.all(videoList.map((v: any) => enrichVideo(ctx.db, v)));
        return { items, total: totalRows[0]?.total ?? 0 };
      }

      // No duration filter: plain paginated query on the videos table.
      const [rows, totalRows] = await Promise.all([
        ctx.db.select().from(videos).where(where).orderBy(order).limit(pageSize).offset(offset),
        ctx.db.select({ total: count() }).from(videos).where(where),
      ]);
      const items = await Promise.all(rows.map((v: any) => enrichVideo(ctx.db, v)));
      return { items, total: totalRows[0]?.total ?? 0 };
    },
  }),
);
|
||||||
|
|
||||||
|
builder.queryField("video", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
slug: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const video = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(videos)
|
||||||
|
.where(and(eq(videos.slug, args.slug), lte(videos.upload_date, new Date())))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!video[0]) return null;
|
||||||
|
|
||||||
|
if (video[0].premium && !ctx.currentUser) {
|
||||||
|
throw new GraphQLError("Unauthorized");
|
||||||
|
}
|
||||||
|
|
||||||
|
return enrichVideo(ctx.db, video[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("adminGetVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const video = await ctx.db.select().from(videos).where(eq(videos.id, args.id)).limit(1);
|
||||||
|
if (!video[0]) return null;
|
||||||
|
return enrichVideo(ctx.db, video[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.queryField("videoLikeStatus", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoLikeStatusType,
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) return { liked: false };
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(video_likes)
|
||||||
|
.where(
|
||||||
|
and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)),
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
return { liked: existing.length > 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("likeVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoLikeResponseType,
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(video_likes)
|
||||||
|
.where(
|
||||||
|
and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)),
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (existing.length > 0) throw new GraphQLError("Already liked");
|
||||||
|
|
||||||
|
await ctx.db.insert(video_likes).values({
|
||||||
|
video_id: args.videoId,
|
||||||
|
user_id: ctx.currentUser.id,
|
||||||
|
});
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(videos)
|
||||||
|
.set({
|
||||||
|
likes_count:
|
||||||
|
((
|
||||||
|
await ctx.db
|
||||||
|
.select({ c: videos.likes_count })
|
||||||
|
.from(videos)
|
||||||
|
.where(eq(videos.id, args.videoId))
|
||||||
|
.limit(1)
|
||||||
|
)[0]?.c as number) + 1 || 1,
|
||||||
|
})
|
||||||
|
.where(eq(videos.id, args.videoId));
|
||||||
|
|
||||||
|
const likesCount = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(video_likes)
|
||||||
|
.where(eq(video_likes.video_id, args.videoId));
|
||||||
|
return { liked: true, likes_count: likesCount[0]?.count || 1 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("unlikeVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoLikeResponseType,
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
|
||||||
|
const existing = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(video_likes)
|
||||||
|
.where(
|
||||||
|
and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)),
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (existing.length === 0) throw new GraphQLError("Not liked");
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.delete(video_likes)
|
||||||
|
.where(
|
||||||
|
and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)),
|
||||||
|
);
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(videos)
|
||||||
|
.set({
|
||||||
|
likes_count: Math.max(
|
||||||
|
(((
|
||||||
|
await ctx.db
|
||||||
|
.select({ c: videos.likes_count })
|
||||||
|
.from(videos)
|
||||||
|
.where(eq(videos.id, args.videoId))
|
||||||
|
.limit(1)
|
||||||
|
)[0]?.c as number) || 1) - 1,
|
||||||
|
0,
|
||||||
|
),
|
||||||
|
})
|
||||||
|
.where(eq(videos.id, args.videoId));
|
||||||
|
|
||||||
|
const likesCount = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(video_likes)
|
||||||
|
.where(eq(video_likes.video_id, args.videoId));
|
||||||
|
return { liked: false, likes_count: likesCount[0]?.count || 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("recordVideoPlay", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoPlayResponseType,
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
sessionId: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const play = await ctx.db
|
||||||
|
.insert(video_plays)
|
||||||
|
.values({
|
||||||
|
video_id: args.videoId,
|
||||||
|
user_id: ctx.currentUser?.id || null,
|
||||||
|
session_id: args.sessionId || null,
|
||||||
|
})
|
||||||
|
.returning({ id: video_plays.id });
|
||||||
|
|
||||||
|
const playsCount = await ctx.db
|
||||||
|
.select({ count: count() })
|
||||||
|
.from(video_plays)
|
||||||
|
.where(eq(video_plays.video_id, args.videoId));
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(videos)
|
||||||
|
.set({ plays_count: playsCount[0]?.count || 0 })
|
||||||
|
.where(eq(videos.id, args.videoId));
|
||||||
|
|
||||||
|
return {
|
||||||
|
success: true,
|
||||||
|
play_id: play[0].id,
|
||||||
|
plays_count: playsCount[0]?.count || 0,
|
||||||
|
};
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateVideoPlay", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
playId: t.arg.string({ required: true }),
|
||||||
|
durationWatched: t.arg.int({ required: true }),
|
||||||
|
completed: t.arg.boolean({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
const play = await ctx.db
|
||||||
|
.select()
|
||||||
|
.from(video_plays)
|
||||||
|
.where(eq(video_plays.id, args.playId))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!play[0]) return false;
|
||||||
|
|
||||||
|
// If play belongs to a user, verify ownership
|
||||||
|
if (play[0].user_id && (!ctx.currentUser || play[0].user_id !== ctx.currentUser.id)) {
|
||||||
|
throw new GraphQLError("Forbidden");
|
||||||
|
}
|
||||||
|
|
||||||
|
await ctx.db
|
||||||
|
.update(video_plays)
|
||||||
|
.set({
|
||||||
|
duration_watched: args.durationWatched,
|
||||||
|
completed: args.completed,
|
||||||
|
date_updated: new Date(),
|
||||||
|
})
|
||||||
|
.where(eq(video_plays.id, args.playId));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Analytics dashboard payload for the current user, restricted to the
// "model" role. Aggregates likes/plays across all videos the model is
// credited on (via video_models) and returns totals, per-day series, and a
// per-video breakdown as an untyped JSON blob.
builder.queryField("analytics", (t) =>
  t.field({
    type: "JSON",
    nullable: true,
    resolve: async (_root, _args, ctx) => {
      if (!ctx.currentUser || ctx.currentUser.role !== "model") {
        throw new GraphQLError("Unauthorized");
      }

      const userId = ctx.currentUser.id;

      // Get all videos by this user (via video_models)
      const modelVideoIds = await ctx.db
        .select({ video_id: video_models.video_id })
        .from(video_models)
        .where(eq(video_models.user_id, userId));

      // No credited videos: return an empty but fully-shaped payload.
      if (modelVideoIds.length === 0) {
        return {
          total_videos: 0,
          total_likes: 0,
          total_plays: 0,
          plays_by_date: {},
          likes_by_date: {},
          videos: [],
        };
      }

      const videoIds = modelVideoIds.map((v: any) => v.video_id);
      const videoList = await ctx.db.select().from(videos).where(inArray(videos.id, videoIds));
      const plays = await ctx.db
        .select()
        .from(video_plays)
        .where(inArray(video_plays.video_id, videoIds));
      const likes = await ctx.db
        .select()
        .from(video_likes)
        .where(inArray(video_likes.video_id, videoIds));

      // Totals are summed from the denormalized counters on the video rows.
      // NOTE(review): the per-date series below count raw rows instead, so
      // the two can drift if the counters are stale — confirm acceptable.
      const totalLikes = videoList.reduce((sum, v) => sum + (v.likes_count || 0), 0);
      const totalPlays = videoList.reduce((sum, v) => sum + (v.plays_count || 0), 0);

      // Bucket plays per UTC calendar day (YYYY-MM-DD).
      const playsByDate = plays.reduce((acc: any, play) => {
        const date = new Date(play.date_created).toISOString().split("T")[0];
        if (!acc[date]) acc[date] = 0;
        acc[date]++;
        return acc;
      }, {});

      // Bucket likes per UTC calendar day.
      const likesByDate = likes.reduce((acc: any, like) => {
        const date = new Date(like.date_created).toISOString().split("T")[0];
        if (!acc[date]) acc[date] = 0;
        acc[date]++;
        return acc;
      }, {});

      // Per-video breakdown: completion rate and average watch time.
      const videoAnalytics = videoList.map((video) => {
        const vPlays = plays.filter((p) => p.video_id === video.id);
        const completedPlays = vPlays.filter((p) => p.completed).length;
        const avgWatchTime =
          vPlays.length > 0
            ? vPlays.reduce((sum, p) => sum + (p.duration_watched || 0), 0) / vPlays.length
            : 0;

        return {
          id: video.id,
          title: video.title,
          slug: video.slug,
          upload_date: video.upload_date,
          likes: video.likes_count || 0,
          plays: video.plays_count || 0,
          completed_plays: completedPlays,
          // Guard against division by zero when plays_count is 0 or null.
          completion_rate: video.plays_count ? (completedPlays / video.plays_count) * 100 : 0,
          avg_watch_time: Math.round(avgWatchTime),
        };
      });

      return {
        total_videos: videoList.length,
        total_likes: totalLikes,
        total_plays: totalPlays,
        plays_by_date: playsByDate,
        likes_by_date: likesByDate,
        videos: videoAnalytics,
      };
    },
  }),
);
|
||||||
|
|
||||||
|
// ─── Admin queries & mutations ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
builder.queryField("adminListVideos", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: AdminVideoListType,
|
||||||
|
args: {
|
||||||
|
search: t.arg.string(),
|
||||||
|
premium: t.arg.boolean(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
limit: t.arg.int(),
|
||||||
|
offset: t.arg.int(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const limit = args.limit ?? 50;
|
||||||
|
const offset = args.offset ?? 0;
|
||||||
|
|
||||||
|
const conditions: any[] = [];
|
||||||
|
if (args.search) conditions.push(ilike(videos.title, `%${args.search}%`));
|
||||||
|
if (args.premium !== null && args.premium !== undefined)
|
||||||
|
conditions.push(eq(videos.premium, args.premium));
|
||||||
|
if (args.featured !== null && args.featured !== undefined)
|
||||||
|
conditions.push(eq(videos.featured, args.featured));
|
||||||
|
|
||||||
|
const where = conditions.length > 0 ? and(...conditions) : undefined;
|
||||||
|
const [rows, totalRows] = await Promise.all([
|
||||||
|
ctx.db
|
||||||
|
.select()
|
||||||
|
.from(videos)
|
||||||
|
.where(where)
|
||||||
|
.orderBy(desc(videos.upload_date))
|
||||||
|
.limit(limit)
|
||||||
|
.offset(offset),
|
||||||
|
ctx.db.select({ total: count() }).from(videos).where(where),
|
||||||
|
]);
|
||||||
|
const items = await Promise.all(rows.map((v: any) => enrichVideo(ctx.db, v)));
|
||||||
|
return { items, total: totalRows[0]?.total ?? 0 };
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("createVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoType,
|
||||||
|
args: {
|
||||||
|
title: t.arg.string({ required: true }),
|
||||||
|
slug: t.arg.string({ required: true }),
|
||||||
|
description: t.arg.string(),
|
||||||
|
imageId: t.arg.string(),
|
||||||
|
movieId: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
premium: t.arg.boolean(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
uploadDate: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const inserted = await ctx.db
|
||||||
|
.insert(videos)
|
||||||
|
.values({
|
||||||
|
title: args.title,
|
||||||
|
slug: args.slug,
|
||||||
|
description: args.description || null,
|
||||||
|
image: args.imageId || null,
|
||||||
|
movie: args.movieId || null,
|
||||||
|
tags: args.tags || [],
|
||||||
|
premium: args.premium ?? false,
|
||||||
|
featured: args.featured ?? false,
|
||||||
|
upload_date: args.uploadDate ? new Date(args.uploadDate) : new Date(),
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
return enrichVideo(ctx.db, inserted[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("updateVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: VideoType,
|
||||||
|
nullable: true,
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
title: t.arg.string(),
|
||||||
|
slug: t.arg.string(),
|
||||||
|
description: t.arg.string(),
|
||||||
|
imageId: t.arg.string(),
|
||||||
|
movieId: t.arg.string(),
|
||||||
|
tags: t.arg.stringList(),
|
||||||
|
premium: t.arg.boolean(),
|
||||||
|
featured: t.arg.boolean(),
|
||||||
|
uploadDate: t.arg.string(),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
const updates: Record<string, unknown> = {};
|
||||||
|
if (args.title !== undefined && args.title !== null) updates.title = args.title;
|
||||||
|
if (args.slug !== undefined && args.slug !== null) updates.slug = args.slug;
|
||||||
|
if (args.description !== undefined) updates.description = args.description;
|
||||||
|
if (args.imageId !== undefined) updates.image = args.imageId;
|
||||||
|
if (args.movieId !== undefined) updates.movie = args.movieId;
|
||||||
|
if (args.tags !== undefined && args.tags !== null) updates.tags = args.tags;
|
||||||
|
if (args.premium !== undefined && args.premium !== null) updates.premium = args.premium;
|
||||||
|
if (args.featured !== undefined && args.featured !== null) updates.featured = args.featured;
|
||||||
|
if (args.uploadDate !== undefined && args.uploadDate !== null)
|
||||||
|
updates.upload_date = new Date(args.uploadDate);
|
||||||
|
|
||||||
|
const updated = await ctx.db
|
||||||
|
.update(videos)
|
||||||
|
.set(updates as any)
|
||||||
|
.where(eq(videos.id, args.id))
|
||||||
|
.returning();
|
||||||
|
if (!updated[0]) return null;
|
||||||
|
return enrichVideo(ctx.db, updated[0]);
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("deleteVideo", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
id: t.arg.string({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db.delete(videos).where(eq(videos.id, args.id));
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
builder.mutationField("setVideoModels", (t) =>
|
||||||
|
t.field({
|
||||||
|
type: "Boolean",
|
||||||
|
args: {
|
||||||
|
videoId: t.arg.string({ required: true }),
|
||||||
|
userIds: t.arg.stringList({ required: true }),
|
||||||
|
},
|
||||||
|
resolve: async (_root, args, ctx) => {
|
||||||
|
requireAdmin(ctx);
|
||||||
|
await ctx.db.delete(video_models).where(eq(video_models.video_id, args.videoId));
|
||||||
|
if (args.userIds.length > 0) {
|
||||||
|
await ctx.db.insert(video_models).values(
|
||||||
|
args.userIds.map((userId) => ({
|
||||||
|
video_id: args.videoId,
|
||||||
|
user_id: userId,
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
},
|
||||||
|
}),
|
||||||
|
);
|
||||||
422
packages/backend/src/graphql/types/index.ts
Normal file
422
packages/backend/src/graphql/types/index.ts
Normal file
@@ -0,0 +1,422 @@
|
|||||||
|
import type {
|
||||||
|
MediaFile,
|
||||||
|
User,
|
||||||
|
VideoModel,
|
||||||
|
VideoFile,
|
||||||
|
Video,
|
||||||
|
ModelPhoto,
|
||||||
|
Model,
|
||||||
|
Article,
|
||||||
|
CommentUser,
|
||||||
|
Comment,
|
||||||
|
Stats,
|
||||||
|
Recording,
|
||||||
|
VideoLikeStatus,
|
||||||
|
VideoLikeResponse,
|
||||||
|
VideoPlayResponse,
|
||||||
|
VideoAnalytics,
|
||||||
|
Analytics,
|
||||||
|
LeaderboardEntry,
|
||||||
|
UserStats,
|
||||||
|
UserAchievement,
|
||||||
|
RecentPoint,
|
||||||
|
UserGamification,
|
||||||
|
Achievement,
|
||||||
|
} from "@sexy.pivoine.art/types";
|
||||||
|
|
||||||
|
type AdminUserDetail = User & { photos: ModelPhoto[] };
|
||||||
|
import { builder } from "../builder";
|
||||||
|
|
||||||
|
// GraphQL object type for an uploaded media file row (images, movies, …).
export const FileType = builder.objectRef<MediaFile>("File").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    title: t.exposeString("title", { nullable: true }),
    description: t.exposeString("description", { nullable: true }),
    filename: t.exposeString("filename"),
    mime_type: t.exposeString("mime_type", { nullable: true }),
    filesize: t.exposeFloat("filesize", { nullable: true }),
    duration: t.exposeInt("duration", { nullable: true }),
    uploaded_by: t.exposeString("uploaded_by", { nullable: true }),
    date_created: t.expose("date_created", { type: "DateTime" }),
  }),
});

// User/model profile as exposed through the API.
export const UserType = builder.objectRef<User>("User").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    email: t.exposeString("email"),
    first_name: t.exposeString("first_name", { nullable: true }),
    last_name: t.exposeString("last_name", { nullable: true }),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    slug: t.exposeString("slug", { nullable: true }),
    description: t.exposeString("description", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    role: t.exposeString("role"),
    is_admin: t.exposeBoolean("is_admin"),
    avatar: t.exposeString("avatar", { nullable: true }),
    banner: t.exposeString("banner", { nullable: true }),
    email_verified: t.exposeBoolean("email_verified"),
    date_created: t.expose("date_created", { type: "DateTime" }),
  }),
});

// CurrentUser is the same shape as User.
// NOTE(review): the field list is duplicated from UserType above — keep the
// two in sync when adding fields (separate GraphQL types, same TS backing).
export const CurrentUserType = builder.objectRef<User>("CurrentUser").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    email: t.exposeString("email"),
    first_name: t.exposeString("first_name", { nullable: true }),
    last_name: t.exposeString("last_name", { nullable: true }),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    slug: t.exposeString("slug", { nullable: true }),
    description: t.exposeString("description", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    role: t.exposeString("role"),
    is_admin: t.exposeBoolean("is_admin"),
    avatar: t.exposeString("avatar", { nullable: true }),
    banner: t.exposeString("banner", { nullable: true }),
    email_verified: t.exposeBoolean("email_verified"),
    date_created: t.expose("date_created", { type: "DateTime" }),
  }),
});
|
||||||
|
|
||||||
|
// Minimal model info embedded on a video (a credited performer).
export const VideoModelType = builder.objectRef<VideoModel>("VideoModel").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    slug: t.exposeString("slug", { nullable: true }),
    avatar: t.exposeString("avatar", { nullable: true }),
  }),
});

// Movie-file metadata embedded on a video.
export const VideoFileType = builder.objectRef<VideoFile>("VideoFile").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    filename: t.exposeString("filename"),
    mime_type: t.exposeString("mime_type", { nullable: true }),
    duration: t.exposeInt("duration", { nullable: true }),
  }),
});

// A video together with the related data attached by `enrichVideo`
// (models, movie_file, and the likes/plays counters).
export const VideoType = builder.objectRef<Video>("Video").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    slug: t.exposeString("slug"),
    title: t.exposeString("title"),
    description: t.exposeString("description", { nullable: true }),
    image: t.exposeString("image", { nullable: true }),
    movie: t.exposeString("movie", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    upload_date: t.expose("upload_date", { type: "DateTime" }),
    premium: t.exposeBoolean("premium", { nullable: true }),
    featured: t.exposeBoolean("featured", { nullable: true }),
    likes_count: t.exposeInt("likes_count", { nullable: true }),
    plays_count: t.exposeInt("plays_count", { nullable: true }),
    models: t.expose("models", { type: [VideoModelType], nullable: true }),
    movie_file: t.expose("movie_file", { type: VideoFileType, nullable: true }),
  }),
});
|
||||||
|
|
||||||
|
// A single photo in a model's gallery.
export const ModelPhotoType = builder.objectRef<ModelPhoto>("ModelPhoto").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    filename: t.exposeString("filename"),
  }),
});

// Public model profile including its gallery photos.
export const ModelType = builder.objectRef<Model>("Model").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    slug: t.exposeString("slug", { nullable: true }),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    description: t.exposeString("description", { nullable: true }),
    avatar: t.exposeString("avatar", { nullable: true }),
    banner: t.exposeString("banner", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    date_created: t.expose("date_created", { type: "DateTime" }),
    photos: t.expose("photos", { type: [ModelPhotoType], nullable: true }),
  }),
});

// Editorial article with an optional author credit.
export const ArticleType = builder.objectRef<Article>("Article").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    slug: t.exposeString("slug"),
    title: t.exposeString("title"),
    excerpt: t.exposeString("excerpt", { nullable: true }),
    content: t.exposeString("content", { nullable: true }),
    image: t.exposeString("image", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    publish_date: t.expose("publish_date", { type: "DateTime" }),
    category: t.exposeString("category", { nullable: true }),
    featured: t.exposeBoolean("featured", { nullable: true }),
    author: t.expose("author", { type: VideoModelType, nullable: true }),
  }),
});
|
||||||
|
|
||||||
|
// Minimal author info attached to a comment.
export const CommentUserType = builder.objectRef<CommentUser>("CommentUser").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    first_name: t.exposeString("first_name", { nullable: true }),
    last_name: t.exposeString("last_name", { nullable: true }),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    avatar: t.exposeString("avatar", { nullable: true }),
  }),
});

// Polymorphic comment: `collection` + `item_id` identify the commented item.
export const CommentType = builder.objectRef<Comment>("Comment").implement({
  fields: (t) => ({
    id: t.exposeInt("id"),
    collection: t.exposeString("collection"),
    item_id: t.exposeString("item_id"),
    comment: t.exposeString("comment"),
    user_id: t.exposeString("user_id"),
    date_created: t.expose("date_created", { type: "DateTime" }),
    user: t.expose("user", { type: CommentUserType, nullable: true }),
  }),
});

// Site-wide counters (videos / models / viewers).
export const StatsType = builder.objectRef<Stats>("Stats").implement({
  fields: (t) => ({
    videos_count: t.exposeInt("videos_count"),
    models_count: t.exposeInt("models_count"),
    viewers_count: t.exposeInt("viewers_count"),
  }),
});

// A user-owned recording session: a timed stream of events plus device info,
// optionally linked to a video and optionally made public.
export const RecordingType = builder.objectRef<Recording>("Recording").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    title: t.exposeString("title"),
    description: t.exposeString("description", { nullable: true }),
    slug: t.exposeString("slug"),
    duration: t.exposeInt("duration"),
    events: t.expose("events", { type: "JSON", nullable: true }),
    device_info: t.expose("device_info", { type: "JSON", nullable: true }),
    user_id: t.exposeString("user_id"),
    status: t.exposeString("status"),
    tags: t.exposeStringList("tags", { nullable: true }),
    linked_video: t.exposeString("linked_video", { nullable: true }),
    featured: t.exposeBoolean("featured", { nullable: true }),
    public: t.exposeBoolean("public", { nullable: true }),
    date_created: t.expose("date_created", { type: "DateTime" }),
    date_updated: t.expose("date_updated", { type: "DateTime", nullable: true }),
  }),
});
|
||||||
|
|
||||||
|
// Returned by the like-toggle mutation: new like state plus updated total.
export const VideoLikeResponseType = builder
  .objectRef<VideoLikeResponse>("VideoLikeResponse")
  .implement({
    fields: (t) => ({
      liked: t.exposeBoolean("liked"),
      likes_count: t.exposeInt("likes_count"),
    }),
  });

// Returned when a play event is registered for a video.
export const VideoPlayResponseType = builder
  .objectRef<VideoPlayResponse>("VideoPlayResponse")
  .implement({
    fields: (t) => ({
      success: t.exposeBoolean("success"),
      play_id: t.exposeString("play_id"),
      plays_count: t.exposeInt("plays_count"),
    }),
  });

// Whether the current user has liked a given video.
export const VideoLikeStatusType = builder.objectRef<VideoLikeStatus>("VideoLikeStatus").implement({
  fields: (t) => ({
    liked: t.exposeBoolean("liked"),
  }),
});

// Per-video engagement metrics for the analytics dashboard.
export const VideoAnalyticsType = builder.objectRef<VideoAnalytics>("VideoAnalytics").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    title: t.exposeString("title"),
    slug: t.exposeString("slug"),
    upload_date: t.expose("upload_date", { type: "DateTime" }),
    likes: t.exposeInt("likes"),
    plays: t.exposeInt("plays"),
    completed_plays: t.exposeInt("completed_plays"),
    completion_rate: t.exposeFloat("completion_rate"),
    avg_watch_time: t.exposeInt("avg_watch_time"),
  }),
});

// Aggregate analytics; the *_by_date fields are opaque JSON time series
// (presumably keyed by date string — confirm against the resolver).
export const AnalyticsType = builder.objectRef<Analytics>("Analytics").implement({
  fields: (t) => ({
    total_videos: t.exposeInt("total_videos"),
    total_likes: t.exposeInt("total_likes"),
    total_plays: t.exposeInt("total_plays"),
    plays_by_date: t.expose("plays_by_date", { type: "JSON" }),
    likes_by_date: t.expose("likes_by_date", { type: "JSON" }),
    videos: t.expose("videos", { type: [VideoAnalyticsType] }),
  }),
});
|
||||||
|
|
||||||
|
// One row of the public leaderboard; count/point fields are nullable because
// a user may not yet have a stats row.
export const LeaderboardEntryType = builder
  .objectRef<LeaderboardEntry>("LeaderboardEntry")
  .implement({
    fields: (t) => ({
      user_id: t.exposeString("user_id"),
      display_name: t.exposeString("display_name", { nullable: true }),
      avatar: t.exposeString("avatar", { nullable: true }),
      total_weighted_points: t.exposeFloat("total_weighted_points", { nullable: true }),
      total_raw_points: t.exposeInt("total_raw_points", { nullable: true }),
      recordings_count: t.exposeInt("recordings_count", { nullable: true }),
      playbacks_count: t.exposeInt("playbacks_count", { nullable: true }),
      achievements_count: t.exposeInt("achievements_count", { nullable: true }),
      rank: t.exposeInt("rank"),
    }),
  });

// A single user's gamification stats (same aggregates updateUserStats maintains).
export const UserStatsType = builder.objectRef<UserStats>("UserStats").implement({
  fields: (t) => ({
    user_id: t.exposeString("user_id"),
    total_raw_points: t.exposeInt("total_raw_points", { nullable: true }),
    total_weighted_points: t.exposeFloat("total_weighted_points", { nullable: true }),
    recordings_count: t.exposeInt("recordings_count", { nullable: true }),
    playbacks_count: t.exposeInt("playbacks_count", { nullable: true }),
    comments_count: t.exposeInt("comments_count", { nullable: true }),
    achievements_count: t.exposeInt("achievements_count", { nullable: true }),
    rank: t.exposeInt("rank"),
  }),
});

// An achievement as unlocked (or in progress) by a specific user.
export const UserAchievementType = builder.objectRef<UserAchievement>("UserAchievement").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    code: t.exposeString("code"),
    name: t.exposeString("name"),
    description: t.exposeString("description", { nullable: true }),
    icon: t.exposeString("icon", { nullable: true }),
    category: t.exposeString("category", { nullable: true }),
    date_unlocked: t.expose("date_unlocked", { type: "DateTime" }),
    progress: t.exposeInt("progress", { nullable: true }),
    required_count: t.exposeInt("required_count"),
  }),
});

// A recent point-earning event shown in the user's activity feed.
export const RecentPointType = builder.objectRef<RecentPoint>("RecentPoint").implement({
  fields: (t) => ({
    action: t.exposeString("action"),
    points: t.exposeInt("points"),
    date_created: t.expose("date_created", { type: "DateTime" }),
    recording_id: t.exposeString("recording_id", { nullable: true }),
  }),
});

// Composite gamification payload for the profile page: stats + achievements + recent points.
export const UserGamificationType = builder
  .objectRef<UserGamification>("UserGamification")
  .implement({
    fields: (t) => ({
      stats: t.expose("stats", { type: UserStatsType, nullable: true }),
      achievements: t.expose("achievements", { type: [UserAchievementType] }),
      recent_points: t.expose("recent_points", { type: [RecentPointType] }),
    }),
  });
|
||||||
|
|
||||||
|
// An achievement definition (catalog entry, independent of any user).
export const AchievementType = builder.objectRef<Achievement>("Achievement").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    code: t.exposeString("code"),
    name: t.exposeString("name"),
    description: t.exposeString("description", { nullable: true }),
    icon: t.exposeString("icon", { nullable: true }),
    category: t.exposeString("category", { nullable: true }),
    required_count: t.exposeInt("required_count"),
    points_reward: t.exposeInt("points_reward"),
  }),
});

// Paginated list wrappers: `items` is one page, `total` the full match count.
export const VideoListType = builder
  .objectRef<{ items: Video[]; total: number }>("VideoList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [VideoType] }),
      total: t.exposeInt("total"),
    }),
  });

export const ArticleListType = builder
  .objectRef<{ items: Article[]; total: number }>("ArticleList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [ArticleType] }),
      total: t.exposeInt("total"),
    }),
  });

export const ModelListType = builder
  .objectRef<{ items: Model[]; total: number }>("ModelList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [ModelType] }),
      total: t.exposeInt("total"),
    }),
  });

// Admin-scoped list wrappers — distinct GraphQL names so admin queries can
// diverge from the public ones without a breaking schema change.
export const AdminVideoListType = builder
  .objectRef<{ items: Video[]; total: number }>("AdminVideoList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [VideoType] }),
      total: t.exposeInt("total"),
    }),
  });

export const AdminArticleListType = builder
  .objectRef<{ items: Article[]; total: number }>("AdminArticleList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [ArticleType] }),
      total: t.exposeInt("total"),
    }),
  });

export const AdminCommentListType = builder
  .objectRef<{ items: Comment[]; total: number }>("AdminCommentList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [CommentType] }),
      total: t.exposeInt("total"),
    }),
  });

export const AdminRecordingListType = builder
  .objectRef<{ items: Recording[]; total: number }>("AdminRecordingList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [RecordingType] }),
      total: t.exposeInt("total"),
    }),
  });

export const AdminUserListType = builder
  .objectRef<{ items: User[]; total: number }>("AdminUserList")
  .implement({
    fields: (t) => ({
      items: t.expose("items", { type: [UserType] }),
      total: t.exposeInt("total"),
    }),
  });
|
||||||
|
|
||||||
|
// Full user record for the admin detail view — exposes fields (email,
// email_verified, is_admin) that must never appear on public user types.
export const AdminUserDetailType = builder.objectRef<AdminUserDetail>("AdminUserDetail").implement({
  fields: (t) => ({
    id: t.exposeString("id"),
    email: t.exposeString("email"),
    first_name: t.exposeString("first_name", { nullable: true }),
    last_name: t.exposeString("last_name", { nullable: true }),
    artist_name: t.exposeString("artist_name", { nullable: true }),
    slug: t.exposeString("slug", { nullable: true }),
    description: t.exposeString("description", { nullable: true }),
    tags: t.exposeStringList("tags", { nullable: true }),
    role: t.exposeString("role"),
    is_admin: t.exposeBoolean("is_admin"),
    avatar: t.exposeString("avatar", { nullable: true }),
    banner: t.exposeString("banner", { nullable: true }),
    email_verified: t.exposeBoolean("email_verified"),
    date_created: t.expose("date_created", { type: "DateTime" }),
    photos: t.expose("photos", { type: [ModelPhotoType] }),
  }),
});
|
||||||
140
packages/backend/src/index.ts
Normal file
140
packages/backend/src/index.ts
Normal file
@@ -0,0 +1,140 @@
|
|||||||
|
import Fastify, { type FastifyRequest, type FastifyReply } from "fastify";
|
||||||
|
import fastifyCookie from "@fastify/cookie";
|
||||||
|
import fastifyCors from "@fastify/cors";
|
||||||
|
import fastifyMultipart from "@fastify/multipart";
|
||||||
|
import fastifyStatic from "@fastify/static";
|
||||||
|
import { createYoga } from "graphql-yoga";
|
||||||
|
import { eq } from "drizzle-orm";
|
||||||
|
import { files } from "./db/schema/index";
|
||||||
|
import path from "path";
|
||||||
|
import { existsSync } from "fs";
|
||||||
|
import sharp from "sharp";
|
||||||
|
import { schema } from "./graphql/index";
|
||||||
|
import { buildContext } from "./graphql/context";
|
||||||
|
import { db } from "./db/connection";
|
||||||
|
import { redis } from "./lib/auth";
|
||||||
|
import { logger } from "./lib/logger";
|
||||||
|
|
||||||
|
// Server configuration, overridable via environment variables.
const PORT = parseInt(process.env.PORT || "4000");
const UPLOAD_DIR = process.env.UPLOAD_DIR || "/data/uploads";
const CORS_ORIGIN = process.env.CORS_ORIGIN || "http://localhost:3000";

/**
 * Boot the Fastify HTTP server: cookie/CORS/multipart plugins, the GraphQL
 * Yoga endpoint at /graphql, uploaded-asset serving with on-demand image
 * transforms at /assets/:id, and a /health check.
 */
async function main() {
  const fastify = Fastify({ loggerInstance: logger });

  await fastify.register(fastifyCookie, {
    secret: process.env.COOKIE_SECRET || "change-me-in-production",
  });

  await fastify.register(fastifyCors, {
    origin: CORS_ORIGIN,
    credentials: true,
    methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"],
  });

  await fastify.register(fastifyMultipart, {
    limits: {
      fileSize: 5 * 1024 * 1024 * 1024, // 5 GB
    },
  });

  // fastify-static provides reply.sendFile(); files are stored as <UPLOAD_DIR>/<id>/<filename>
  await fastify.register(fastifyStatic, {
    root: path.resolve(UPLOAD_DIR),
    prefix: "/assets/",
    serve: false, // disable auto-serving; we use a custom route below
    decorateReply: true,
  });

  // GraphQL Yoga instance; context additionally carries the raw req/reply
  // plus the db and redis handles (see buildContext).
  const yoga = createYoga<{
    req: FastifyRequest;
    reply: FastifyReply;
    db: typeof db;
    redis: typeof redis;
  }>({
    schema,
    context: buildContext,
    graphqlEndpoint: "/graphql",
    healthCheckEndpoint: "/health",
    // Fan Yoga's log calls out to the Fastify logger.
    logging: {
      debug: (...args) => args.forEach((arg) => fastify.log.debug(arg)),
      info: (...args) => args.forEach((arg) => fastify.log.info(arg)),
      warn: (...args) => args.forEach((arg) => fastify.log.warn(arg)),
      error: (...args) => args.forEach((arg) => fastify.log.error(arg)),
    },
  });

  // NOTE(review): handleNodeRequestAndResponse returns a WHATWG Response;
  // confirm this graphql-yoga version writes it to `reply` automatically —
  // some integrations require copying status/headers/body onto the reply.
  fastify.route({
    url: "/graphql",
    method: ["GET", "POST", "OPTIONS"],
    handler: (req, reply) =>
      yoga.handleNodeRequestAndResponse(req, reply, { req, reply, db, redis }),
  });

  // Transform presets — only banner/thumbnail force a crop; others preserve aspect ratio
  const TRANSFORMS: Record<string, { width: number; height?: number; fit?: "cover" | "inside" }> = {
    mini: { width: 80, height: 80, fit: "cover" },
    thumbnail: { width: 300, height: 300, fit: "cover" },
    preview: { width: 800, fit: "inside" },
    medium: { width: 1400, fit: "inside" },
    banner: { width: 1600, height: 480, fit: "cover" },
  };

  // Serve uploaded files: GET /assets/:id?transform=<preset>
  // Files are stored as <UPLOAD_DIR>/<id>/<filename> — look up filename in DB
  fastify.get("/assets/:id", async (request, reply) => {
    const { id } = request.params as { id: string };
    const { transform } = request.query as { transform?: string };

    // The DB lookup doubles as input validation: only ids present in `files`
    // are ever joined into a filesystem path below.
    const result = await db
      .select({ filename: files.filename, mime_type: files.mime_type })
      .from(files)
      .where(eq(files.id, id))
      .limit(1);

    if (!result[0]) return reply.status(404).send({ error: "File not found" });

    const { filename, mime_type } = result[0];
    // Uploads are content-addressed by id, so they can be cached forever.
    reply.header("Cache-Control", "public, max-age=31536000, immutable");

    // `transform` only reaches the filesystem when it is a key of TRANSFORMS,
    // so the cache filename cannot be attacker-controlled.
    const preset = transform ? TRANSFORMS[transform] : null;
    if (preset && mime_type?.startsWith("image/")) {
      // Resized variants are cached next to the original as <preset>.webp.
      const cacheFile = path.join(UPLOAD_DIR, id, `${transform}.webp`);
      if (!existsSync(cacheFile)) {
        const originalPath = path.join(UPLOAD_DIR, id, filename);
        await sharp(originalPath)
          .resize({
            width: preset.width,
            height: preset.height,
            fit: preset.fit ?? "inside",
            withoutEnlargement: true,
          })
          .webp({ quality: 92 })
          .toFile(cacheFile);
      }
      reply.header("Content-Type", "image/webp");
      return reply.sendFile(path.join(id, `${transform}.webp`));
    }

    reply.header("Content-Type", mime_type);
    return reply.sendFile(path.join(id, filename));
  });

  fastify.get("/health", async (_request, reply) => {
    return reply.send({ status: "ok", timestamp: new Date().toISOString() });
  });

  try {
    await fastify.listen({ port: PORT, host: "0.0.0.0" });
    fastify.log.info(`Backend running at http://0.0.0.0:${PORT}`);
    fastify.log.info(`GraphQL at http://0.0.0.0:${PORT}/graphql`);
  } catch (err) {
    fastify.log.error(err);
    process.exit(1);
  }
}

main().catch((err) => {
  console.error("Fatal error:", err);
  process.exit(1);
});
|
||||||
18
packages/backend/src/lib/acl.ts
Normal file
18
packages/backend/src/lib/acl.ts
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
import { GraphQLError } from "graphql";
|
||||||
|
import type { Context } from "../graphql/builder";
|
||||||
|
|
||||||
|
export function requireAuth(ctx: Context): void {
|
||||||
|
if (!ctx.currentUser) throw new GraphQLError("Unauthorized");
|
||||||
|
}
|
||||||
|
|
||||||
|
export function requireAdmin(ctx: Context): void {
|
||||||
|
requireAuth(ctx);
|
||||||
|
if (!ctx.currentUser!.is_admin) throw new GraphQLError("Forbidden");
|
||||||
|
}
|
||||||
|
|
||||||
|
export function requireOwnerOrAdmin(ctx: Context, ownerId: string): void {
|
||||||
|
requireAuth(ctx);
|
||||||
|
if (ctx.currentUser!.id !== ownerId && !ctx.currentUser!.is_admin) {
|
||||||
|
throw new GraphQLError("Forbidden");
|
||||||
|
}
|
||||||
|
}
|
||||||
9
packages/backend/src/lib/argon.ts
Normal file
9
packages/backend/src/lib/argon.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
import argon2 from "argon2";
|
||||||
|
|
||||||
|
export async function hash(password: string): Promise<string> {
|
||||||
|
return argon2.hash(password);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function verify(hash: string, password: string): Promise<boolean> {
|
||||||
|
return argon2.verify(hash, password);
|
||||||
|
}
|
||||||
31
packages/backend/src/lib/auth.ts
Normal file
31
packages/backend/src/lib/auth.ts
Normal file
@@ -0,0 +1,31 @@
|
|||||||
|
import Redis from "ioredis";
|
||||||
|
|
||||||
|
// The user payload stored in Redis for each active session. This is a
// denormalized snapshot taken at login — profile edits are not reflected
// until the session is rewritten.
export type SessionUser = {
  id: string;
  email: string;
  role: "model" | "viewer";
  is_admin: boolean;
  first_name: string | null;
  last_name: string | null;
  artist_name: string | null;
  slug: string | null;
  avatar: string | null;
};

// Shared Redis client used for session storage (and exported for reuse).
export const redis = new Redis(process.env.REDIS_URL || "redis://localhost:6379");
|
||||||
|
|
||||||
|
export async function setSession(token: string, user: SessionUser): Promise<void> {
|
||||||
|
await redis.set(`session:${token}`, JSON.stringify(user), "EX", 86400);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getSession(token: string): Promise<SessionUser | null> {
|
||||||
|
const data = await redis.get(`session:${token}`);
|
||||||
|
if (!data) return null;
|
||||||
|
// Slide the expiration window on every access
|
||||||
|
await redis.expire(`session:${token}`, 86400);
|
||||||
|
return JSON.parse(data) as SessionUser;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteSession(token: string): Promise<void> {
|
||||||
|
await redis.del(`session:${token}`);
|
||||||
|
}
|
||||||
34
packages/backend/src/lib/email.ts
Normal file
34
packages/backend/src/lib/email.ts
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import nodemailer from "nodemailer";
|
||||||
|
|
||||||
|
// SMTP transport configured from the environment; auth is omitted entirely
// when SMTP_USER is unset (e.g. a local relay in development).
const transporter = nodemailer.createTransport({
  host: process.env.SMTP_HOST || "localhost",
  port: parseInt(process.env.SMTP_PORT || "587"),
  secure: process.env.SMTP_SECURE === "true",
  auth: process.env.SMTP_USER
    ? {
        user: process.env.SMTP_USER,
        pass: process.env.SMTP_PASS,
      }
    : undefined,
});

// Envelope sender and the public base URL used to build links in emails.
const FROM = process.env.EMAIL_FROM || "noreply@sexy.pivoine.art";
const BASE_URL = process.env.PUBLIC_URL || "http://localhost:3000";
||||||
|
|
||||||
|
export async function sendVerification(email: string, token: string): Promise<void> {
|
||||||
|
await transporter.sendMail({
|
||||||
|
from: FROM,
|
||||||
|
to: email,
|
||||||
|
subject: "Verify your email",
|
||||||
|
html: `<p>Click <a href="${BASE_URL}/signup/verify?token=${token}">here</a> to verify your email.</p>`,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function sendPasswordReset(email: string, token: string): Promise<void> {
|
||||||
|
await transporter.sendMail({
|
||||||
|
from: FROM,
|
||||||
|
to: email,
|
||||||
|
subject: "Reset your password",
|
||||||
|
html: `<p>Click <a href="${BASE_URL}/password/reset?token=${token}">here</a> to reset your password.</p>`,
|
||||||
|
});
|
||||||
|
}
|
||||||
10
packages/backend/src/lib/ffmpeg.ts
Normal file
10
packages/backend/src/lib/ffmpeg.ts
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
import ffmpeg from "fluent-ffmpeg";
|
||||||
|
|
||||||
|
export function extractDuration(filePath: string): Promise<number> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
ffmpeg.ffprobe(filePath, (err, metadata) => {
|
||||||
|
if (err) return reject(err);
|
||||||
|
resolve(Math.round(metadata.format.duration || 0));
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
323
packages/backend/src/lib/gamification.ts
Normal file
323
packages/backend/src/lib/gamification.ts
Normal file
@@ -0,0 +1,323 @@
|
|||||||
|
import { eq, sql, and, gt, isNotNull, count, sum } from "drizzle-orm";
|
||||||
|
import type { DB } from "../db/connection";
|
||||||
|
import {
|
||||||
|
user_points,
|
||||||
|
user_stats,
|
||||||
|
recordings,
|
||||||
|
recording_plays,
|
||||||
|
comments,
|
||||||
|
user_achievements,
|
||||||
|
achievements,
|
||||||
|
users,
|
||||||
|
} from "../db/schema/index";
|
||||||
|
|
||||||
|
// Raw point values awarded per action (before time decay is applied).
export const POINT_VALUES = {
  RECORDING_CREATE: 50,
  RECORDING_PLAY: 10,
  RECORDING_COMPLETE: 5,
  COMMENT_CREATE: 5,
  RECORDING_FEATURED: 100,
} as const;

// Per-day exponential decay rate used by calculateWeightedScore:
// weight = exp(-DECAY_LAMBDA * age_in_days). 0.005 ≈ half-life of ~139 days.
const DECAY_LAMBDA = 0.005;
|
||||||
|
|
||||||
|
export async function awardPoints(
|
||||||
|
db: DB,
|
||||||
|
userId: string,
|
||||||
|
action: keyof typeof POINT_VALUES,
|
||||||
|
recordingId?: string,
|
||||||
|
): Promise<void> {
|
||||||
|
const points = POINT_VALUES[action];
|
||||||
|
await db.insert(user_points).values({
|
||||||
|
user_id: userId,
|
||||||
|
action,
|
||||||
|
points,
|
||||||
|
recording_id: recordingId || null,
|
||||||
|
date_created: new Date(),
|
||||||
|
});
|
||||||
|
await updateUserStats(db, userId);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Compute the user's time-decayed score: each point row contributes
 * points * exp(-DECAY_LAMBDA * age_in_days), summed in SQL.
 * Returns 0 when the user has no point rows (SUM is NULL → "0").
 */
export async function calculateWeightedScore(db: DB, userId: string): Promise<number> {
  const now = new Date();
  // EXTRACT(EPOCH ...) / 86400 converts the row's age to fractional days.
  const result = await db.execute(sql`
    SELECT SUM(
      points * EXP(-${DECAY_LAMBDA} * EXTRACT(EPOCH FROM (${now}::timestamptz - date_created)) / 86400)
    ) as weighted_score
    FROM user_points
    WHERE user_id = ${userId}
  `);
  // Raw-query rows are untyped; NULL sum falls back to "0".
  return parseFloat((result.rows[0] as any)?.weighted_score || "0");
}
|
||||||
|
|
||||||
|
/**
 * Recompute every aggregate in the user's stats row from source tables and
 * upsert it: raw points, time-decayed weighted points, published recordings,
 * plays of *other* users' recordings, recording comments, and unlocked
 * achievements. Called after any point-earning event.
 */
export async function updateUserStats(db: DB, userId: string): Promise<void> {
  const now = new Date();

  // Lifetime raw point total (SUM returns a string/NULL → parse with "0" fallback).
  const rawPointsResult = await db
    .select({ total: sum(user_points.points) })
    .from(user_points)
    .where(eq(user_points.user_id, userId));
  const totalRawPoints = parseInt(String(rawPointsResult[0]?.total || "0"));

  const totalWeightedPoints = await calculateWeightedScore(db, userId);

  // Only published recordings count toward the creator stat.
  const recordingsResult = await db
    .select({ count: count() })
    .from(recordings)
    .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published")));
  const recordingsCount = recordingsResult[0]?.count || 0;

  // Get playbacks count (excluding own recordings)
  const ownRecordingIds = await db
    .select({ id: recordings.id })
    .from(recordings)
    .where(eq(recordings.user_id, userId));
  const ownIds = ownRecordingIds.map((r) => r.id);

  let playbacksCount: number;
  if (ownIds.length > 0) {
    // NOT IN needs a non-empty list, hence the raw-SQL branch with a joined
    // parameter list; the else branch covers users with no recordings.
    const playbacksResult = await db.execute(sql`
      SELECT COUNT(*) as count FROM recording_plays
      WHERE user_id = ${userId}
      AND recording_id NOT IN (${sql.join(
        ownIds.map((id) => sql`${id}`),
        sql`, `,
      )})
    `);
    playbacksCount = parseInt((playbacksResult.rows[0] as any)?.count || "0");
  } else {
    const playbacksResult = await db
      .select({ count: count() })
      .from(recording_plays)
      .where(eq(recording_plays.user_id, userId));
    playbacksCount = playbacksResult[0]?.count || 0;
  }

  // Only comments on recordings count (the comments table is polymorphic).
  const commentsResult = await db
    .select({ count: count() })
    .from(comments)
    .where(and(eq(comments.user_id, userId), eq(comments.collection, "recordings")));
  const commentsCount = commentsResult[0]?.count || 0;

  // Achievements with a non-null unlock date are the ones actually earned.
  const achievementsResult = await db
    .select({ count: count() })
    .from(user_achievements)
    .where(and(eq(user_achievements.user_id, userId), isNotNull(user_achievements.date_unlocked)));
  const achievementsCount = achievementsResult[0]?.count || 0;

  // Manual upsert: update the stats row if present, otherwise insert it.
  const existing = await db
    .select()
    .from(user_stats)
    .where(eq(user_stats.user_id, userId))
    .limit(1);

  if (existing.length > 0) {
    await db
      .update(user_stats)
      .set({
        total_raw_points: totalRawPoints,
        total_weighted_points: totalWeightedPoints,
        recordings_count: recordingsCount,
        playbacks_count: playbacksCount,
        comments_count: commentsCount,
        achievements_count: achievementsCount,
        last_updated: now,
      })
      .where(eq(user_stats.user_id, userId));
  } else {
    await db.insert(user_stats).values({
      user_id: userId,
      total_raw_points: totalRawPoints,
      total_weighted_points: totalWeightedPoints,
      recordings_count: recordingsCount,
      playbacks_count: playbacksCount,
      comments_count: commentsCount,
      achievements_count: achievementsCount,
      last_updated: now,
    });
  }
}
|
||||||
|
|
||||||
|
export async function checkAchievements(db: DB, userId: string, category?: string): Promise<void> {
|
||||||
|
let achievementsQuery = db
|
||||||
|
.select()
|
||||||
|
.from(achievements)
|
||||||
|
.where(eq(achievements.status, "published"));
|
||||||
|
|
||||||
|
if (category) {
|
||||||
|
achievementsQuery = db
|
||||||
|
.select()
|
||||||
|
.from(achievements)
|
||||||
|
.where(and(eq(achievements.status, "published"), eq(achievements.category, category)));
|
||||||
|
}
|
||||||
|
|
||||||
|
const achievementsList = await achievementsQuery;
|
||||||
|
|
||||||
|
for (const achievement of achievementsList) {
|
||||||
|
const progress = await getAchievementProgress(db, userId, achievement);
|
||||||
|
|
||||||
|
const existing = await db
|
||||||
|
.select()
|
||||||
|
.from(user_achievements)
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(user_achievements.user_id, userId),
|
||||||
|
eq(user_achievements.achievement_id, achievement.id),
|
||||||
|
),
|
||||||
|
)
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
const isUnlocked = progress >= achievement.required_count;
|
||||||
|
const wasUnlocked = existing[0]?.date_unlocked !== null;
|
||||||
|
|
||||||
|
if (existing.length > 0) {
|
||||||
|
await db
|
||||||
|
.update(user_achievements)
|
||||||
|
.set({
|
||||||
|
progress,
|
||||||
|
date_unlocked: isUnlocked ? existing[0].date_unlocked || new Date() : null,
|
||||||
|
})
|
||||||
|
.where(
|
||||||
|
and(
|
||||||
|
eq(user_achievements.user_id, userId),
|
||||||
|
eq(user_achievements.achievement_id, achievement.id),
|
||||||
|
),
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
await db.insert(user_achievements).values({
|
||||||
|
user_id: userId,
|
||||||
|
achievement_id: achievement.id,
|
||||||
|
progress,
|
||||||
|
date_unlocked: isUnlocked ? new Date() : null,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isUnlocked && !wasUnlocked && achievement.points_reward > 0) {
|
||||||
|
await db.insert(user_points).values({
|
||||||
|
user_id: userId,
|
||||||
|
action: `ACHIEVEMENT_${achievement.code}`,
|
||||||
|
points: achievement.points_reward,
|
||||||
|
recording_id: null,
|
||||||
|
date_created: new Date(),
|
||||||
|
});
|
||||||
|
await updateUserStats(db, userId);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Compute a user's current progress toward one achievement, dispatched on
 * the achievement's code. Returns a count for counter-style achievements
 * and 0/1 for boolean ones; unknown codes return 0.
 */
async function getAchievementProgress(
  db: DB,
  userId: string,
  achievement: typeof achievements.$inferSelect,
): Promise<number> {
  const { code } = achievement;

  // Creator milestones: number of published recordings.
  if (["first_recording", "recording_10", "recording_50", "recording_100"].includes(code)) {
    const result = await db
      .select({ count: count() })
      .from(recordings)
      .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published")));
    return result[0]?.count || 0;
  }

  // Boolean: has at least one published recording that was featured.
  if (code === "featured_recording") {
    const result = await db
      .select({ count: count() })
      .from(recordings)
      .where(
        and(
          eq(recordings.user_id, userId),
          eq(recordings.status, "published"),
          eq(recordings.featured, true),
        ),
      );
    return result[0]?.count || 0;
  }

  // Play milestones: plays of OTHER users' recordings only.
  // NOTE(review): with a LEFT JOIN, plays whose recording row is missing
  // have r.user_id NULL and `NULL != userId` filters them out — confirm
  // that orphaned plays are intentionally excluded.
  if (["first_play", "play_100", "play_500"].includes(code)) {
    const result = await db.execute(sql`
      SELECT COUNT(*) as count
      FROM recording_plays rp
      LEFT JOIN recordings r ON rp.recording_id = r.id
      WHERE rp.user_id = ${userId}
      AND r.user_id != ${userId}
    `);
    return parseInt((result.rows[0] as any)?.count || "0");
  }

  // Completion milestones: plays flagged as completed.
  if (["completionist_10", "completionist_100"].includes(code)) {
    const result = await db
      .select({ count: count() })
      .from(recording_plays)
      .where(and(eq(recording_plays.user_id, userId), eq(recording_plays.completed, true)));
    return result[0]?.count || 0;
  }

  // Comment milestones: comments on recordings only.
  if (["first_comment", "comment_50", "comment_250"].includes(code)) {
    const result = await db
      .select({ count: count() })
      .from(comments)
      .where(and(eq(comments.user_id, userId), eq(comments.collection, "recordings")));
    return result[0]?.count || 0;
  }

  // Boolean: joined within one month of the (hard-coded) platform launch.
  if (code === "early_adopter") {
    const user = await db.select().from(users).where(eq(users.id, userId)).limit(1);
    if (user[0]) {
      const joinDate = new Date(user[0].date_created);
      const platformLaunch = new Date("2025-01-01");
      const oneMonthAfterLaunch = new Date(platformLaunch);
      oneMonthAfterLaunch.setMonth(oneMonthAfterLaunch.getMonth() + 1);
      return joinDate <= oneMonthAfterLaunch ? 1 : 0;
    }
  }

  // Boolean: account is at least one year old.
  if (code === "one_year") {
    const user = await db.select().from(users).where(eq(users.id, userId)).limit(1);
    if (user[0]) {
      const joinDate = new Date(user[0].date_created);
      const oneYearAgo = new Date();
      oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
      return joinDate <= oneYearAgo ? 1 : 0;
    }
  }

  // Boolean: both ≥50 published recordings AND ≥100 plays of others' recordings.
  if (code === "balanced_creator") {
    const recordingsResult = await db
      .select({ count: count() })
      .from(recordings)
      .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published")));
    const playsResult = await db.execute(sql`
      SELECT COUNT(*) as count FROM recording_plays rp
      LEFT JOIN recordings r ON rp.recording_id = r.id
      WHERE rp.user_id = ${userId} AND r.user_id != ${userId}
    `);
    const rc = recordingsResult[0]?.count || 0;
    const pc = parseInt((playsResult.rows[0] as any)?.count || "0");
    return rc >= 50 && pc >= 100 ? 1 : 0;
  }

  // Boolean: rank = 1 + number of users with a strictly higher weighted score.
  if (code === "top_10_rank") {
    const userStat = await db
      .select()
      .from(user_stats)
      .where(eq(user_stats.user_id, userId))
      .limit(1);
    if (!userStat[0]) return 0;
    const rankResult = await db
      .select({ count: count() })
      .from(user_stats)
      .where(gt(user_stats.total_weighted_points, userStat[0].total_weighted_points || 0));
    const userRank = (rankResult[0]?.count || 0) + 1;
    return userRank <= 10 ? 1 : 0;
  }

  // Unknown / fall-through (also reached when the user row is missing above).
  return 0;
}
|
||||||
|
|
||||||
|
export async function recalculateAllWeightedScores(db: DB): Promise<void> {
|
||||||
|
const allUsers = await db.select({ user_id: user_stats.user_id }).from(user_stats);
|
||||||
|
for (const u of allUsers) {
|
||||||
|
await updateUserStats(db, u.user_id);
|
||||||
|
}
|
||||||
|
}
|
||||||
101
packages/backend/src/lib/logger.ts
Normal file
101
packages/backend/src/lib/logger.ts
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
type LogLevel = "trace" | "debug" | "info" | "warn" | "error" | "fatal";
|
||||||
|
|
||||||
|
const LEVEL_VALUES: Record<LogLevel, number> = {
|
||||||
|
trace: 10,
|
||||||
|
debug: 20,
|
||||||
|
info: 30,
|
||||||
|
warn: 40,
|
||||||
|
error: 50,
|
||||||
|
fatal: 60,
|
||||||
|
};
|
||||||
|
|
||||||
|
function createLogger(bindings: Record<string, unknown> = {}, initialLevel: LogLevel = "info") {
|
||||||
|
let currentLevel = initialLevel;
|
||||||
|
|
||||||
|
function shouldLog(level: LogLevel): boolean {
|
||||||
|
return LEVEL_VALUES[level] >= LEVEL_VALUES[currentLevel];
|
||||||
|
}
|
||||||
|
|
||||||
|
function formatMessage(level: LogLevel, arg: unknown, msg?: string): string {
|
||||||
|
const timestamp = new Date().toISOString();
|
||||||
|
|
||||||
|
let message: string;
|
||||||
|
const meta: Record<string, unknown> = { ...bindings };
|
||||||
|
|
||||||
|
if (typeof arg === "string") {
|
||||||
|
message = arg;
|
||||||
|
} else if (arg !== null && typeof arg === "object") {
|
||||||
|
// Pino-style: log(obj, msg?) — strip internal pino keys
|
||||||
|
const {
|
||||||
|
msg: m,
|
||||||
|
level: _l,
|
||||||
|
time: _t,
|
||||||
|
pid: _p,
|
||||||
|
hostname: _h,
|
||||||
|
req: _req,
|
||||||
|
res: _res,
|
||||||
|
reqId,
|
||||||
|
...rest
|
||||||
|
} = arg as Record<string, unknown>;
|
||||||
|
message = msg || (typeof m === "string" ? m : "");
|
||||||
|
if (reqId) meta.reqId = reqId;
|
||||||
|
Object.assign(meta, rest);
|
||||||
|
} else {
|
||||||
|
message = String(arg ?? "");
|
||||||
|
}
|
||||||
|
|
||||||
|
const parts = [`[${timestamp}]`, `[${level.toUpperCase()}]`, message];
|
||||||
|
let result = parts.join(" ");
|
||||||
|
|
||||||
|
const metaEntries = Object.entries(meta).filter(([k]) => k !== "reqId");
|
||||||
|
const reqId = meta.reqId;
|
||||||
|
if (reqId) result = `[${timestamp}] [${level.toUpperCase()}] [${reqId}] ${message}`;
|
||||||
|
|
||||||
|
if (metaEntries.length > 0) {
|
||||||
|
result += " " + JSON.stringify(Object.fromEntries(metaEntries));
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
function write(level: LogLevel, arg: unknown, msg?: string) {
|
||||||
|
if (!shouldLog(level)) return;
|
||||||
|
const formatted = formatMessage(level, arg, msg);
|
||||||
|
switch (level) {
|
||||||
|
case "trace":
|
||||||
|
case "debug":
|
||||||
|
console.debug(formatted);
|
||||||
|
break;
|
||||||
|
case "info":
|
||||||
|
console.info(formatted);
|
||||||
|
break;
|
||||||
|
case "warn":
|
||||||
|
console.warn(formatted);
|
||||||
|
break;
|
||||||
|
case "error":
|
||||||
|
case "fatal":
|
||||||
|
console.error(formatted);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
get level() {
|
||||||
|
return currentLevel;
|
||||||
|
},
|
||||||
|
set level(l: string) {
|
||||||
|
currentLevel = l as LogLevel;
|
||||||
|
},
|
||||||
|
trace: (arg: unknown, msg?: string) => write("trace", arg, msg),
|
||||||
|
debug: (arg: unknown, msg?: string) => write("debug", arg, msg),
|
||||||
|
info: (arg: unknown, msg?: string) => write("info", arg, msg),
|
||||||
|
warn: (arg: unknown, msg?: string) => write("warn", arg, msg),
|
||||||
|
error: (arg: unknown, msg?: string) => write("error", arg, msg),
|
||||||
|
fatal: (arg: unknown, msg?: string) => write("fatal", arg, msg),
|
||||||
|
silent: () => {},
|
||||||
|
child: (newBindings: Record<string, unknown>) =>
|
||||||
|
createLogger({ ...bindings, ...newBindings }, currentLevel),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export const logger = createLogger({}, (process.env.LOG_LEVEL as LogLevel) || "info");
|
||||||
5
packages/backend/src/lib/slugify.ts
Normal file
5
packages/backend/src/lib/slugify.ts
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
import slugifyLib from "slugify";
|
||||||
|
|
||||||
|
export function slugify(text: string): string {
|
||||||
|
return slugifyLib(text, { lower: true, strict: true });
|
||||||
|
}
|
||||||
233
packages/backend/src/migrations/0000_pale_hellion.sql
Normal file
233
packages/backend/src/migrations/0000_pale_hellion.sql
Normal file
@@ -0,0 +1,233 @@
|
|||||||
|
CREATE TYPE "public"."achievement_status" AS ENUM('draft', 'published');--> statement-breakpoint
|
||||||
|
CREATE TYPE "public"."user_role" AS ENUM('model', 'viewer', 'admin');--> statement-breakpoint
|
||||||
|
CREATE TYPE "public"."recording_status" AS ENUM('draft', 'published', 'archived');--> statement-breakpoint
|
||||||
|
CREATE TABLE "articles" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"slug" text NOT NULL,
|
||||||
|
"title" text NOT NULL,
|
||||||
|
"excerpt" text,
|
||||||
|
"content" text,
|
||||||
|
"image" text,
|
||||||
|
"tags" text[] DEFAULT '{}',
|
||||||
|
"publish_date" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"author" text,
|
||||||
|
"category" text,
|
||||||
|
"featured" boolean DEFAULT false,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "comments" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "comments_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"collection" text NOT NULL,
|
||||||
|
"item_id" text NOT NULL,
|
||||||
|
"comment" text NOT NULL,
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "files" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"title" text,
|
||||||
|
"description" text,
|
||||||
|
"filename" text NOT NULL,
|
||||||
|
"mime_type" text,
|
||||||
|
"filesize" bigint,
|
||||||
|
"duration" integer,
|
||||||
|
"uploaded_by" text,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "achievements" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"code" text NOT NULL,
|
||||||
|
"name" text NOT NULL,
|
||||||
|
"description" text,
|
||||||
|
"icon" text,
|
||||||
|
"category" text,
|
||||||
|
"required_count" integer DEFAULT 1 NOT NULL,
|
||||||
|
"points_reward" integer DEFAULT 0 NOT NULL,
|
||||||
|
"status" "achievement_status" DEFAULT 'published' NOT NULL,
|
||||||
|
"sort" integer DEFAULT 0
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "user_achievements" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "user_achievements_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"achievement_id" text NOT NULL,
|
||||||
|
"progress" integer DEFAULT 0,
|
||||||
|
"date_unlocked" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "user_points" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "user_points_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"action" text NOT NULL,
|
||||||
|
"points" integer NOT NULL,
|
||||||
|
"recording_id" text,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "user_stats" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "user_stats_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"total_raw_points" integer DEFAULT 0,
|
||||||
|
"total_weighted_points" real DEFAULT 0,
|
||||||
|
"recordings_count" integer DEFAULT 0,
|
||||||
|
"playbacks_count" integer DEFAULT 0,
|
||||||
|
"comments_count" integer DEFAULT 0,
|
||||||
|
"achievements_count" integer DEFAULT 0,
|
||||||
|
"last_updated" timestamp DEFAULT now()
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "user_photos" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "user_photos_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"file_id" text NOT NULL,
|
||||||
|
"sort" integer DEFAULT 0
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "users" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"email" text NOT NULL,
|
||||||
|
"password_hash" text NOT NULL,
|
||||||
|
"first_name" text,
|
||||||
|
"last_name" text,
|
||||||
|
"artist_name" text,
|
||||||
|
"slug" text,
|
||||||
|
"description" text,
|
||||||
|
"tags" text[] DEFAULT '{}',
|
||||||
|
"role" "user_role" DEFAULT 'viewer' NOT NULL,
|
||||||
|
"avatar" text,
|
||||||
|
"banner" text,
|
||||||
|
"email_verified" boolean DEFAULT false NOT NULL,
|
||||||
|
"email_verify_token" text,
|
||||||
|
"password_reset_token" text,
|
||||||
|
"password_reset_expiry" timestamp,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "video_likes" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"video_id" text NOT NULL,
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "video_models" (
|
||||||
|
"video_id" text NOT NULL,
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
CONSTRAINT "video_models_video_id_user_id_pk" PRIMARY KEY("video_id","user_id")
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "video_plays" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"video_id" text NOT NULL,
|
||||||
|
"user_id" text,
|
||||||
|
"session_id" text,
|
||||||
|
"duration_watched" integer,
|
||||||
|
"completed" boolean DEFAULT false,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "videos" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"slug" text NOT NULL,
|
||||||
|
"title" text NOT NULL,
|
||||||
|
"description" text,
|
||||||
|
"image" text,
|
||||||
|
"movie" text,
|
||||||
|
"tags" text[] DEFAULT '{}',
|
||||||
|
"upload_date" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"premium" boolean DEFAULT false,
|
||||||
|
"featured" boolean DEFAULT false,
|
||||||
|
"likes_count" integer DEFAULT 0,
|
||||||
|
"plays_count" integer DEFAULT 0
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "recording_plays" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"recording_id" text NOT NULL,
|
||||||
|
"user_id" text,
|
||||||
|
"duration_played" integer DEFAULT 0,
|
||||||
|
"completed" boolean DEFAULT false,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "recordings" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"title" text NOT NULL,
|
||||||
|
"description" text,
|
||||||
|
"slug" text NOT NULL,
|
||||||
|
"duration" integer NOT NULL,
|
||||||
|
"events" jsonb DEFAULT '[]'::jsonb,
|
||||||
|
"device_info" jsonb DEFAULT '[]'::jsonb,
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"status" "recording_status" DEFAULT 'draft' NOT NULL,
|
||||||
|
"tags" text[] DEFAULT '{}',
|
||||||
|
"linked_video" text,
|
||||||
|
"featured" boolean DEFAULT false,
|
||||||
|
"public" boolean DEFAULT false,
|
||||||
|
"original_recording_id" text,
|
||||||
|
"date_created" timestamp DEFAULT now() NOT NULL,
|
||||||
|
"date_updated" timestamp
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
ALTER TABLE "articles" ADD CONSTRAINT "articles_image_files_id_fk" FOREIGN KEY ("image") REFERENCES "public"."files"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "articles" ADD CONSTRAINT "articles_author_users_id_fk" FOREIGN KEY ("author") REFERENCES "public"."users"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "comments" ADD CONSTRAINT "comments_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_achievements" ADD CONSTRAINT "user_achievements_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_achievements" ADD CONSTRAINT "user_achievements_achievement_id_achievements_id_fk" FOREIGN KEY ("achievement_id") REFERENCES "public"."achievements"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_points" ADD CONSTRAINT "user_points_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_points" ADD CONSTRAINT "user_points_recording_id_recordings_id_fk" FOREIGN KEY ("recording_id") REFERENCES "public"."recordings"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_stats" ADD CONSTRAINT "user_stats_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_photos" ADD CONSTRAINT "user_photos_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "user_photos" ADD CONSTRAINT "user_photos_file_id_files_id_fk" FOREIGN KEY ("file_id") REFERENCES "public"."files"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "users" ADD CONSTRAINT "users_avatar_files_id_fk" FOREIGN KEY ("avatar") REFERENCES "public"."files"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "users" ADD CONSTRAINT "users_banner_files_id_fk" FOREIGN KEY ("banner") REFERENCES "public"."files"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_likes" ADD CONSTRAINT "video_likes_video_id_videos_id_fk" FOREIGN KEY ("video_id") REFERENCES "public"."videos"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_likes" ADD CONSTRAINT "video_likes_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_models" ADD CONSTRAINT "video_models_video_id_videos_id_fk" FOREIGN KEY ("video_id") REFERENCES "public"."videos"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_models" ADD CONSTRAINT "video_models_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_plays" ADD CONSTRAINT "video_plays_video_id_videos_id_fk" FOREIGN KEY ("video_id") REFERENCES "public"."videos"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "video_plays" ADD CONSTRAINT "video_plays_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "videos" ADD CONSTRAINT "videos_image_files_id_fk" FOREIGN KEY ("image") REFERENCES "public"."files"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "videos" ADD CONSTRAINT "videos_movie_files_id_fk" FOREIGN KEY ("movie") REFERENCES "public"."files"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "recording_plays" ADD CONSTRAINT "recording_plays_recording_id_recordings_id_fk" FOREIGN KEY ("recording_id") REFERENCES "public"."recordings"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "recording_plays" ADD CONSTRAINT "recording_plays_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "recordings" ADD CONSTRAINT "recordings_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "recordings" ADD CONSTRAINT "recordings_linked_video_videos_id_fk" FOREIGN KEY ("linked_video") REFERENCES "public"."videos"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "articles_slug_idx" ON "articles" USING btree ("slug");--> statement-breakpoint
|
||||||
|
CREATE INDEX "articles_publish_date_idx" ON "articles" USING btree ("publish_date");--> statement-breakpoint
|
||||||
|
CREATE INDEX "articles_featured_idx" ON "articles" USING btree ("featured");--> statement-breakpoint
|
||||||
|
CREATE INDEX "comments_collection_item_idx" ON "comments" USING btree ("collection","item_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "comments_user_idx" ON "comments" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "files_uploaded_by_idx" ON "files" USING btree ("uploaded_by");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "achievements_code_idx" ON "achievements" USING btree ("code");--> statement-breakpoint
|
||||||
|
CREATE INDEX "user_achievements_user_idx" ON "user_achievements" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "user_achievements_unique_idx" ON "user_achievements" USING btree ("user_id","achievement_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "user_points_user_idx" ON "user_points" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "user_points_date_idx" ON "user_points" USING btree ("date_created");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "user_stats_user_idx" ON "user_stats" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "user_photos_user_idx" ON "user_photos" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "users_email_idx" ON "users" USING btree ("email");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "users_slug_idx" ON "users" USING btree ("slug");--> statement-breakpoint
|
||||||
|
CREATE INDEX "users_role_idx" ON "users" USING btree ("role");--> statement-breakpoint
|
||||||
|
CREATE INDEX "video_likes_video_idx" ON "video_likes" USING btree ("video_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "video_likes_user_idx" ON "video_likes" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "video_plays_video_idx" ON "video_plays" USING btree ("video_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "video_plays_user_idx" ON "video_plays" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "video_plays_date_idx" ON "video_plays" USING btree ("date_created");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "videos_slug_idx" ON "videos" USING btree ("slug");--> statement-breakpoint
|
||||||
|
CREATE INDEX "videos_upload_date_idx" ON "videos" USING btree ("upload_date");--> statement-breakpoint
|
||||||
|
CREATE INDEX "videos_featured_idx" ON "videos" USING btree ("featured");--> statement-breakpoint
|
||||||
|
CREATE INDEX "recording_plays_recording_idx" ON "recording_plays" USING btree ("recording_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "recording_plays_user_idx" ON "recording_plays" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "recordings_slug_idx" ON "recordings" USING btree ("slug");--> statement-breakpoint
|
||||||
|
CREATE INDEX "recordings_user_idx" ON "recordings" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "recordings_status_idx" ON "recordings" USING btree ("status");--> statement-breakpoint
|
||||||
|
CREATE INDEX "recordings_public_idx" ON "recordings" USING btree ("public");
|
||||||
3
packages/backend/src/migrations/0001_is_admin.sql
Normal file
3
packages/backend/src/migrations/0001_is_admin.sql
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
ALTER TABLE "users" ADD COLUMN "is_admin" boolean NOT NULL DEFAULT false;--> statement-breakpoint
|
||||||
|
UPDATE "users" SET "is_admin" = true WHERE "role" = 'admin';--> statement-breakpoint
|
||||||
|
UPDATE "users" SET "role" = 'viewer' WHERE "role" = 'admin';
|
||||||
@@ -0,0 +1,8 @@
|
|||||||
|
-- Update any archived recordings to draft before removing the status
|
||||||
|
UPDATE "recordings" SET "status" = 'draft' WHERE "status" = 'archived';--> statement-breakpoint
|
||||||
|
|
||||||
|
-- Recreate enum without 'archived'
|
||||||
|
ALTER TYPE "public"."recording_status" RENAME TO "recording_status_old";--> statement-breakpoint
|
||||||
|
CREATE TYPE "public"."recording_status" AS ENUM('draft', 'published');--> statement-breakpoint
|
||||||
|
ALTER TABLE "recordings" ALTER COLUMN "status" TYPE "public"."recording_status" USING "status"::text::"public"."recording_status";--> statement-breakpoint
|
||||||
|
DROP TYPE "public"."recording_status_old";
|
||||||
1931
packages/backend/src/migrations/meta/0000_snapshot.json
Normal file
1931
packages/backend/src/migrations/meta/0000_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
27
packages/backend/src/migrations/meta/_journal.json
Normal file
27
packages/backend/src/migrations/meta/_journal.json
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"version": "7",
|
||||||
|
"dialect": "postgresql",
|
||||||
|
"entries": [
|
||||||
|
{
|
||||||
|
"idx": 0,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1772645674513,
|
||||||
|
"tag": "0000_pale_hellion",
|
||||||
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 1,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1772645674514,
|
||||||
|
"tag": "0001_is_admin",
|
||||||
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 2,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1741337600000,
|
||||||
|
"tag": "0002_remove_archived_recording_status",
|
||||||
|
"breakpoints": true
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
565
packages/backend/src/scripts/data-migration.ts
Normal file
565
packages/backend/src/scripts/data-migration.ts
Normal file
@@ -0,0 +1,565 @@
|
|||||||
|
/**
|
||||||
|
* Data Migration: Directus → Custom Backend
|
||||||
|
*
|
||||||
|
* Migrates data from Directus tables to the new schema.
|
||||||
|
* Run with: tsx src/scripts/data-migration.ts
|
||||||
|
*
|
||||||
|
* Environment variables:
|
||||||
|
* DATABASE_URL - PostgreSQL connection (same DB)
|
||||||
|
* OLD_UPLOAD_DIR - Path to Directus uploads (e.g. /old-uploads)
|
||||||
|
* NEW_UPLOAD_DIR - Path to new upload dir (e.g. /data/uploads)
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { Pool } from "pg";
|
||||||
|
import fs from "fs";
|
||||||
|
import path from "path";
|
||||||
|
|
||||||
|
const DATABASE_URL = process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy";
|
||||||
|
const OLD_UPLOAD_DIR = process.env.OLD_UPLOAD_DIR || "/old-uploads";
|
||||||
|
const NEW_UPLOAD_DIR = process.env.NEW_UPLOAD_DIR || "/data/uploads";
|
||||||
|
|
||||||
|
const pool = new Pool({ connectionString: DATABASE_URL });
|
||||||
|
|
||||||
|
async function query(sql: string, params: unknown[] = []) {
|
||||||
|
const client = await pool.connect();
|
||||||
|
try {
|
||||||
|
return await client.query(sql, params);
|
||||||
|
} finally {
|
||||||
|
client.release();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function copyFile(src: string, dest: string) {
|
||||||
|
const dir = path.dirname(dest);
|
||||||
|
if (!fs.existsSync(dir)) {
|
||||||
|
fs.mkdirSync(dir, { recursive: true });
|
||||||
|
}
|
||||||
|
if (fs.existsSync(src)) {
|
||||||
|
fs.copyFileSync(src, dest);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateFiles() {
|
||||||
|
console.log("📁 Migrating files...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, title, description, filename_disk, type, filesize, duration, uploaded_by, uploaded_on as date_created
|
||||||
|
FROM directus_files`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
let skipped = 0;
|
||||||
|
|
||||||
|
for (const file of rows) {
|
||||||
|
// Check if already migrated
|
||||||
|
const existing = await query("SELECT id FROM files WHERE id = $1", [file.id]);
|
||||||
|
if (existing.rows.length > 0) {
|
||||||
|
skipped++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO files (id, title, description, filename, mime_type, filesize, duration, uploaded_by, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
file.id,
|
||||||
|
file.title,
|
||||||
|
file.description,
|
||||||
|
file.filename_disk || `${file.id}`,
|
||||||
|
file.type,
|
||||||
|
file.filesize,
|
||||||
|
file.duration,
|
||||||
|
file.uploaded_by,
|
||||||
|
file.date_created,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
|
||||||
|
// Copy file to new location
|
||||||
|
const srcPath = path.join(OLD_UPLOAD_DIR, file.filename_disk || "");
|
||||||
|
const destPath = path.join(NEW_UPLOAD_DIR, file.id, file.filename_disk || `${file.id}`);
|
||||||
|
const copied = copyFile(srcPath, destPath);
|
||||||
|
|
||||||
|
if (!copied) {
|
||||||
|
console.warn(` ⚠️ File not found on disk: ${file.filename_disk}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Files: ${migrated} migrated, ${skipped} already existed`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateUsers() {
|
||||||
|
console.log("👥 Migrating users...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT u.id, u.email, u.password, u.first_name, u.last_name,
|
||||||
|
u.description, u.avatar, u.join_date as date_created,
|
||||||
|
u.artist_name, u.slug,
|
||||||
|
r.name as role_name
|
||||||
|
FROM directus_users u
|
||||||
|
LEFT JOIN directus_roles r ON u.role = r.id
|
||||||
|
WHERE u.status = 'active'`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
|
||||||
|
for (const user of rows) {
|
||||||
|
const existing = await query("SELECT id FROM users WHERE id = $1", [user.id]);
|
||||||
|
if (existing.rows.length > 0) {
|
||||||
|
migrated++;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const role =
|
||||||
|
user.role_name === "Model"
|
||||||
|
? "model"
|
||||||
|
: user.role_name === "Administrator"
|
||||||
|
? "admin"
|
||||||
|
: "viewer";
|
||||||
|
|
||||||
|
// Fetch tags from custom user fields if they exist
|
||||||
|
let tags: string[] = [];
|
||||||
|
try {
|
||||||
|
const tagsRes = await query("SELECT tags FROM directus_users WHERE id = $1", [user.id]);
|
||||||
|
if (tagsRes.rows[0]?.tags) {
|
||||||
|
tags = Array.isArray(tagsRes.rows[0].tags)
|
||||||
|
? tagsRes.rows[0].tags
|
||||||
|
: JSON.parse(String(tagsRes.rows[0].tags || "[]"));
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
/* tags column may not exist on older Directus installs */
|
||||||
|
}
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO users (id, email, password_hash, first_name, last_name, artist_name, slug,
|
||||||
|
description, tags, role, avatar, email_verified, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
user.id,
|
||||||
|
user.email,
|
||||||
|
user.password || "MIGRATED_NO_PASSWORD",
|
||||||
|
user.first_name,
|
||||||
|
user.last_name,
|
||||||
|
user.artist_name,
|
||||||
|
user.slug,
|
||||||
|
user.description,
|
||||||
|
tags,
|
||||||
|
role,
|
||||||
|
user.avatar,
|
||||||
|
true,
|
||||||
|
user.date_created,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Users: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateUserPhotos() {
|
||||||
|
console.log("🖼️ Migrating user photos...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT directus_users_id as user_id, directus_files_id as file_id
|
||||||
|
FROM junction_directus_users_files`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]);
|
||||||
|
const fileExists = await query("SELECT id FROM files WHERE id = $1", [row.file_id]);
|
||||||
|
if (!userExists.rows.length || !fileExists.rows.length) continue;
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO user_photos (user_id, file_id, sort) VALUES ($1, $2, $3)
|
||||||
|
ON CONFLICT DO NOTHING`,
|
||||||
|
[row.user_id, row.file_id, 0],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ User photos: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateArticles() {
|
||||||
|
console.log("📰 Migrating articles...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, slug, title, excerpt, content, image, tags, publish_date,
|
||||||
|
author, category, featured, date_created, date_updated
|
||||||
|
FROM sexy_articles`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const article of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO articles (id, slug, title, excerpt, content, image, tags, publish_date,
|
||||||
|
author, category, featured, date_created, date_updated)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
article.id,
|
||||||
|
article.slug,
|
||||||
|
article.title,
|
||||||
|
article.excerpt,
|
||||||
|
article.content,
|
||||||
|
article.image,
|
||||||
|
Array.isArray(article.tags) ? article.tags : JSON.parse(String(article.tags || "[]")),
|
||||||
|
article.publish_date,
|
||||||
|
article.author,
|
||||||
|
article.category,
|
||||||
|
article.featured,
|
||||||
|
article.date_created,
|
||||||
|
article.date_updated,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Articles: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateVideos() {
|
||||||
|
console.log("🎬 Migrating videos...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, slug, title, description, image, movie, tags, upload_date,
|
||||||
|
premium, featured
|
||||||
|
FROM sexy_videos`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const video of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO videos (id, slug, title, description, image, movie, tags, upload_date,
|
||||||
|
premium, featured, likes_count, plays_count)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
video.id,
|
||||||
|
video.slug,
|
||||||
|
video.title,
|
||||||
|
video.description,
|
||||||
|
video.image,
|
||||||
|
video.movie,
|
||||||
|
Array.isArray(video.tags) ? video.tags : JSON.parse(String(video.tags || "[]")),
|
||||||
|
video.upload_date,
|
||||||
|
video.premium,
|
||||||
|
video.featured,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Videos: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateVideoModels() {
|
||||||
|
console.log("🔗 Migrating video models...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT sexy_videos_id as video_id, directus_users_id as user_id
|
||||||
|
FROM sexy_videos_models`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
const videoExists = await query("SELECT id FROM videos WHERE id = $1", [row.video_id]);
|
||||||
|
const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]);
|
||||||
|
if (!videoExists.rows.length || !userExists.rows.length) continue;
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO video_models (video_id, user_id) VALUES ($1, $2) ON CONFLICT DO NOTHING`,
|
||||||
|
[row.video_id, row.user_id],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Video models: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateVideoLikes() {
|
||||||
|
console.log("❤️ Migrating video likes...");
|
||||||
|
const { rows } = await query(`SELECT id, video_id, user_id, date_created FROM sexy_video_likes`);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO video_likes (id, video_id, user_id, date_created) VALUES ($1, $2, $3, $4)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[row.id, row.video_id, row.user_id, row.date_created],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Video likes: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateVideoPlays() {
|
||||||
|
console.log("▶️ Migrating video plays...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, video_id, user_id, session_id, duration_watched, completed, date_created
|
||||||
|
FROM sexy_video_plays`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO video_plays (id, video_id, user_id, session_id, duration_watched, completed, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
row.id,
|
||||||
|
row.video_id,
|
||||||
|
row.user_id,
|
||||||
|
row.session_id,
|
||||||
|
row.duration_watched,
|
||||||
|
row.completed,
|
||||||
|
row.date_created,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Video plays: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateRecordings() {
|
||||||
|
console.log("🎙️ Migrating recordings...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, title, description, slug, duration, events, device_info,
|
||||||
|
user_created as user_id, status, tags, linked_video, public,
|
||||||
|
original_recording_id, date_created, date_updated
|
||||||
|
FROM sexy_recordings`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const recording of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO recordings (id, title, description, slug, duration, events, device_info,
|
||||||
|
user_id, status, tags, linked_video, public,
|
||||||
|
original_recording_id, date_created, date_updated)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
recording.id,
|
||||||
|
recording.title,
|
||||||
|
recording.description,
|
||||||
|
recording.slug,
|
||||||
|
recording.duration != null ? Math.round(Number(recording.duration)) : null,
|
||||||
|
typeof recording.events === "string" ? recording.events : JSON.stringify(recording.events),
|
||||||
|
typeof recording.device_info === "string"
|
||||||
|
? recording.device_info
|
||||||
|
: JSON.stringify(recording.device_info),
|
||||||
|
recording.user_id,
|
||||||
|
recording.status,
|
||||||
|
Array.isArray(recording.tags) ? recording.tags : JSON.parse(String(recording.tags || "[]")),
|
||||||
|
recording.linked_video,
|
||||||
|
recording.public,
|
||||||
|
recording.original_recording_id,
|
||||||
|
recording.date_created,
|
||||||
|
recording.date_updated,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Recordings: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateRecordingPlays() {
|
||||||
|
console.log("▶️ Migrating recording plays...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, user_id, recording_id, duration_played, completed, date_created
|
||||||
|
FROM sexy_recording_plays`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO recording_plays (id, recording_id, user_id, duration_played, completed, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[row.id, row.recording_id, row.user_id, row.duration_played, row.completed, row.date_created],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Recording plays: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateComments() {
|
||||||
|
console.log("💬 Migrating comments...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, collection, item, comment, user_created as user_id, date_created
|
||||||
|
FROM directus_comments
|
||||||
|
WHERE collection IN ('sexy_videos', 'sexy_recordings')`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
// Map collection names
|
||||||
|
const collection = row.collection === "sexy_videos" ? "videos" : "recordings";
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO comments (collection, item_id, comment, user_id, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5)`,
|
||||||
|
[collection, row.item, row.comment, row.user_id, row.date_created],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Comments: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateAchievements() {
|
||||||
|
console.log("🏆 Migrating achievements...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT id, code, name, description, icon, category, required_count, points_reward, status, sort
|
||||||
|
FROM sexy_achievements`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
await query(
|
||||||
|
`INSERT INTO achievements (id, code, name, description, icon, category, required_count, points_reward, status, sort)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
|
||||||
|
ON CONFLICT (id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
row.id,
|
||||||
|
row.code,
|
||||||
|
row.name,
|
||||||
|
row.description,
|
||||||
|
row.icon,
|
||||||
|
row.category,
|
||||||
|
row.required_count,
|
||||||
|
row.points_reward,
|
||||||
|
row.status,
|
||||||
|
row.sort,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ Achievements: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateUserAchievements() {
|
||||||
|
console.log("🎖️ Migrating user achievements...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT user_id, achievement_id, progress, date_unlocked FROM sexy_user_achievements`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]);
|
||||||
|
const achievementExists = await query("SELECT id FROM achievements WHERE id = $1", [
|
||||||
|
row.achievement_id,
|
||||||
|
]);
|
||||||
|
if (!userExists.rows.length || !achievementExists.rows.length) continue;
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO user_achievements (user_id, achievement_id, progress, date_unlocked)
|
||||||
|
VALUES ($1, $2, $3, $4)
|
||||||
|
ON CONFLICT (user_id, achievement_id) DO NOTHING`,
|
||||||
|
[row.user_id, row.achievement_id, row.progress, row.date_unlocked],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ User achievements: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateUserPoints() {
|
||||||
|
console.log("💎 Migrating user points...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT user_id, action, points, recording_id, date_created FROM sexy_user_points`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]);
|
||||||
|
if (!userExists.rows.length) continue;
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO user_points (user_id, action, points, recording_id, date_created)
|
||||||
|
VALUES ($1, $2, $3, $4, $5)`,
|
||||||
|
[row.user_id, row.action, row.points, row.recording_id, row.date_created],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ User points: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function migrateUserStats() {
|
||||||
|
console.log("📊 Migrating user stats...");
|
||||||
|
const { rows } = await query(
|
||||||
|
`SELECT user_id, total_raw_points, total_weighted_points, recordings_count,
|
||||||
|
playbacks_count, comments_count, achievements_count, last_updated
|
||||||
|
FROM sexy_user_stats`,
|
||||||
|
);
|
||||||
|
|
||||||
|
let migrated = 0;
|
||||||
|
for (const row of rows) {
|
||||||
|
const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]);
|
||||||
|
if (!userExists.rows.length) continue;
|
||||||
|
|
||||||
|
await query(
|
||||||
|
`INSERT INTO user_stats (user_id, total_raw_points, total_weighted_points, recordings_count,
|
||||||
|
playbacks_count, comments_count, achievements_count, last_updated)
|
||||||
|
VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
|
||||||
|
ON CONFLICT (user_id) DO NOTHING`,
|
||||||
|
[
|
||||||
|
row.user_id,
|
||||||
|
row.total_raw_points,
|
||||||
|
row.total_weighted_points,
|
||||||
|
row.recordings_count,
|
||||||
|
row.playbacks_count,
|
||||||
|
row.comments_count,
|
||||||
|
row.achievements_count,
|
||||||
|
row.last_updated,
|
||||||
|
],
|
||||||
|
);
|
||||||
|
migrated++;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(` ✅ User stats: ${migrated} migrated`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
console.log("🚀 Starting data migration from Directus to custom backend...\n");
|
||||||
|
|
||||||
|
try {
|
||||||
|
// Verify connection
|
||||||
|
await query("SELECT 1");
|
||||||
|
console.log("✅ Database connected\n");
|
||||||
|
|
||||||
|
// Migration order respects FK dependencies
|
||||||
|
await migrateFiles();
|
||||||
|
await migrateUsers();
|
||||||
|
await migrateUserPhotos();
|
||||||
|
await migrateArticles();
|
||||||
|
await migrateVideos();
|
||||||
|
await migrateVideoModels();
|
||||||
|
await migrateVideoLikes();
|
||||||
|
await migrateVideoPlays();
|
||||||
|
await migrateRecordings();
|
||||||
|
await migrateRecordingPlays();
|
||||||
|
await migrateComments();
|
||||||
|
await migrateAchievements();
|
||||||
|
await migrateUserAchievements();
|
||||||
|
await migrateUserPoints();
|
||||||
|
await migrateUserStats();
|
||||||
|
|
||||||
|
console.log("\n🎉 Migration complete!");
|
||||||
|
} catch (error) {
|
||||||
|
console.error("❌ Migration failed:", error);
|
||||||
|
process.exit(1);
|
||||||
|
} finally {
|
||||||
|
await pool.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
main();
|
||||||
27
packages/backend/src/scripts/migrate.ts
Normal file
27
packages/backend/src/scripts/migrate.ts
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import { Pool } from "pg";
|
||||||
|
import { drizzle } from "drizzle-orm/node-postgres";
|
||||||
|
import { migrate } from "drizzle-orm/node-postgres/migrator";
|
||||||
|
import path from "path";
|
||||||
|
|
||||||
|
const pool = new Pool({
|
||||||
|
connectionString: process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy",
|
||||||
|
});
|
||||||
|
|
||||||
|
const db = drizzle(pool);
|
||||||
|
|
||||||
|
async function main() {
|
||||||
|
console.log("Running schema migrations...");
|
||||||
|
// In dev (tsx): __dirname = src/scripts → migrations are at src/migrations
|
||||||
|
// In prod (node dist): __dirname = dist/scripts → migrations are at ../../migrations (package root)
|
||||||
|
const migrationsFolder = __dirname.includes("/src/")
|
||||||
|
? path.join(__dirname, "../migrations")
|
||||||
|
: path.join(__dirname, "../../migrations");
|
||||||
|
await migrate(db, { migrationsFolder });
|
||||||
|
console.log("Schema migrations complete.");
|
||||||
|
await pool.end();
|
||||||
|
}
|
||||||
|
|
||||||
|
main().catch((err) => {
|
||||||
|
console.error("Migration failed:", err);
|
||||||
|
process.exit(1);
|
||||||
|
});
|
||||||
20
packages/backend/tsconfig.json
Normal file
20
packages/backend/tsconfig.json
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"target": "ES2022",
|
||||||
|
"module": "CommonJS",
|
||||||
|
"moduleResolution": "Node",
|
||||||
|
"lib": ["ES2022"],
|
||||||
|
"outDir": "./dist",
|
||||||
|
"rootDir": "./src",
|
||||||
|
"strict": true,
|
||||||
|
"skipLibCheck": true,
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"experimentalDecorators": true,
|
||||||
|
"emitDecoratorMetadata": true,
|
||||||
|
"declaration": true,
|
||||||
|
"declarationMap": true,
|
||||||
|
"sourceMap": true
|
||||||
|
},
|
||||||
|
"include": ["src/**/*"],
|
||||||
|
"exclude": ["node_modules", "dist"]
|
||||||
|
}
|
||||||
@@ -1,54 +0,0 @@
|
|||||||
{
|
|
||||||
"name": "@sexy.pivoine.art/bundle",
|
|
||||||
"description": "Please enter a description for your extension",
|
|
||||||
"icon": "extension",
|
|
||||||
"version": "1.0.0",
|
|
||||||
"keywords": [
|
|
||||||
"directus",
|
|
||||||
"directus-extension",
|
|
||||||
"directus-extension-bundle"
|
|
||||||
],
|
|
||||||
"type": "module",
|
|
||||||
"files": [
|
|
||||||
"dist"
|
|
||||||
],
|
|
||||||
"directus:extension": {
|
|
||||||
"type": "bundle",
|
|
||||||
"path": {
|
|
||||||
"app": "dist/app.js",
|
|
||||||
"api": "dist/api.js"
|
|
||||||
},
|
|
||||||
"entries": [
|
|
||||||
{
|
|
||||||
"name": "endpoint",
|
|
||||||
"type": "endpoint",
|
|
||||||
"source": "src/endpoint"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "hook",
|
|
||||||
"type": "hook",
|
|
||||||
"source": "src/hook"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "theme",
|
|
||||||
"type": "theme",
|
|
||||||
"source": "src/theme"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"host": "^11.11.0"
|
|
||||||
},
|
|
||||||
"scripts": {
|
|
||||||
"build": "directus-extension build",
|
|
||||||
"dev": "directus-extension build -w --no-minify",
|
|
||||||
"link": "directus-extension link",
|
|
||||||
"validate": "directus-extension validate",
|
|
||||||
"add": "directus-extension add"
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@directus/extensions-sdk": "16.0.2"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@sindresorhus/slugify": "^3.0.0",
|
|
||||||
"fluent-ffmpeg": "^2.1.3"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,336 +0,0 @@
|
|||||||
/**
|
|
||||||
* Gamification Helper Functions
|
|
||||||
* Handles points, achievements, and user stats for recording-focused gamification system
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type { Knex } from "knex";
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Point values for different actions
|
|
||||||
*/
|
|
||||||
export const POINT_VALUES = {
|
|
||||||
RECORDING_CREATE: 50,
|
|
||||||
RECORDING_PLAY: 10,
|
|
||||||
RECORDING_COMPLETE: 5,
|
|
||||||
COMMENT_CREATE: 5,
|
|
||||||
RECORDING_FEATURED: 100,
|
|
||||||
} as const;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Time decay constant for weighted scoring
|
|
||||||
* λ = 0.005 means ~14% decay per month
|
|
||||||
*/
|
|
||||||
const DECAY_LAMBDA = 0.005;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Award points to a user for a specific action
|
|
||||||
*/
|
|
||||||
export async function awardPoints(
|
|
||||||
database: Knex,
|
|
||||||
userId: string,
|
|
||||||
action: keyof typeof POINT_VALUES,
|
|
||||||
recordingId?: string,
|
|
||||||
): Promise<void> {
|
|
||||||
const points = POINT_VALUES[action];
|
|
||||||
|
|
||||||
await database("sexy_user_points").insert({
|
|
||||||
user_id: userId,
|
|
||||||
action,
|
|
||||||
points,
|
|
||||||
recording_id: recordingId || null,
|
|
||||||
date_created: new Date(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Update cached stats
|
|
||||||
await updateUserStats(database, userId);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Calculate time-weighted score using exponential decay
|
|
||||||
* Score = Σ (points × e^(-λ × age_in_days))
|
|
||||||
*/
|
|
||||||
export async function calculateWeightedScore(
|
|
||||||
database: Knex,
|
|
||||||
userId: string,
|
|
||||||
): Promise<number> {
|
|
||||||
const now = new Date();
|
|
||||||
|
|
||||||
const result = await database("sexy_user_points")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.select(
|
|
||||||
database.raw(`
|
|
||||||
SUM(
|
|
||||||
points * EXP(-${DECAY_LAMBDA} * EXTRACT(EPOCH FROM (? - date_created)) / 86400)
|
|
||||||
) as weighted_score
|
|
||||||
`, [now]),
|
|
||||||
);
|
|
||||||
|
|
||||||
return result[0]?.weighted_score || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update or create user stats cache
|
|
||||||
*/
|
|
||||||
export async function updateUserStats(database: Knex, userId: string): Promise<void> {
|
|
||||||
const now = new Date();
|
|
||||||
|
|
||||||
// Calculate raw points
|
|
||||||
const rawPointsResult = await database("sexy_user_points")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.sum("points as total");
|
|
||||||
const totalRawPoints = rawPointsResult[0]?.total || 0;
|
|
||||||
|
|
||||||
// Calculate weighted points
|
|
||||||
const totalWeightedPoints = await calculateWeightedScore(database, userId);
|
|
||||||
|
|
||||||
// Get recordings count
|
|
||||||
const recordingsResult = await database("sexy_recordings")
|
|
||||||
.where({ user_created: userId, status: "published" })
|
|
||||||
.count("* as count");
|
|
||||||
const recordingsCount = recordingsResult[0]?.count || 0;
|
|
||||||
|
|
||||||
// Get playbacks count (excluding own recordings)
|
|
||||||
const playbacksResult = await database("sexy_recording_plays")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.whereNotIn("recording_id", function () {
|
|
||||||
this.select("id").from("sexy_recordings").where("user_created", userId);
|
|
||||||
})
|
|
||||||
.count("* as count");
|
|
||||||
const playbacksCount = playbacksResult[0]?.count || 0;
|
|
||||||
|
|
||||||
// Get comments count (on recordings only)
|
|
||||||
const commentsResult = await database("comments")
|
|
||||||
.where({ user_created: userId, collection: "sexy_recordings" })
|
|
||||||
.count("* as count");
|
|
||||||
const commentsCount = commentsResult[0]?.count || 0;
|
|
||||||
|
|
||||||
// Get achievements count
|
|
||||||
const achievementsResult = await database("sexy_user_achievements")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.whereNotNull("date_unlocked")
|
|
||||||
.count("* as count");
|
|
||||||
const achievementsCount = achievementsResult[0]?.count || 0;
|
|
||||||
|
|
||||||
// Upsert stats
|
|
||||||
const existing = await database("sexy_user_stats")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.first();
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
await database("sexy_user_stats")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.update({
|
|
||||||
total_raw_points: totalRawPoints,
|
|
||||||
total_weighted_points: totalWeightedPoints,
|
|
||||||
recordings_count: recordingsCount,
|
|
||||||
playbacks_count: playbacksCount,
|
|
||||||
comments_count: commentsCount,
|
|
||||||
achievements_count: achievementsCount,
|
|
||||||
last_updated: now,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
await database("sexy_user_stats").insert({
|
|
||||||
user_id: userId,
|
|
||||||
total_raw_points: totalRawPoints,
|
|
||||||
total_weighted_points: totalWeightedPoints,
|
|
||||||
recordings_count: recordingsCount,
|
|
||||||
playbacks_count: playbacksCount,
|
|
||||||
comments_count: commentsCount,
|
|
||||||
achievements_count: achievementsCount,
|
|
||||||
last_updated: now,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Check and update achievement progress for a user
|
|
||||||
*/
|
|
||||||
export async function checkAchievements(
|
|
||||||
database: Knex,
|
|
||||||
userId: string,
|
|
||||||
category?: string,
|
|
||||||
): Promise<void> {
|
|
||||||
// Get all achievements (optionally filtered by category)
|
|
||||||
let achievementsQuery = database("sexy_achievements")
|
|
||||||
.where({ status: "published" });
|
|
||||||
|
|
||||||
if (category) {
|
|
||||||
achievementsQuery = achievementsQuery.where({ category });
|
|
||||||
}
|
|
||||||
|
|
||||||
const achievements = await achievementsQuery;
|
|
||||||
|
|
||||||
for (const achievement of achievements) {
|
|
||||||
const progress = await getAchievementProgress(database, userId, achievement);
|
|
||||||
|
|
||||||
// Check if already unlocked
|
|
||||||
const existing = await database("sexy_user_achievements")
|
|
||||||
.where({ user_id: userId, achievement_id: achievement.id })
|
|
||||||
.first();
|
|
||||||
|
|
||||||
const isUnlocked = progress >= achievement.required_count;
|
|
||||||
const wasUnlocked = existing?.date_unlocked !== null;
|
|
||||||
|
|
||||||
if (existing) {
|
|
||||||
// Update progress
|
|
||||||
await database("sexy_user_achievements")
|
|
||||||
.where({ user_id: userId, achievement_id: achievement.id })
|
|
||||||
.update({
|
|
||||||
progress,
|
|
||||||
date_unlocked: isUnlocked ? (existing.date_unlocked || new Date()) : null,
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
// Insert new progress
|
|
||||||
await database("sexy_user_achievements").insert({
|
|
||||||
user_id: userId,
|
|
||||||
achievement_id: achievement.id,
|
|
||||||
progress,
|
|
||||||
date_unlocked: isUnlocked ? new Date() : null,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Award bonus points if newly unlocked
|
|
||||||
if (isUnlocked && !wasUnlocked && achievement.points_reward > 0) {
|
|
||||||
await database("sexy_user_points").insert({
|
|
||||||
user_id: userId,
|
|
||||||
action: `ACHIEVEMENT_${achievement.code}`,
|
|
||||||
points: achievement.points_reward,
|
|
||||||
recording_id: null,
|
|
||||||
date_created: new Date(),
|
|
||||||
});
|
|
||||||
|
|
||||||
// Refresh stats after awarding bonus
|
|
||||||
await updateUserStats(database, userId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get progress for a specific achievement
|
|
||||||
*/
|
|
||||||
async function getAchievementProgress(
|
|
||||||
database: Knex,
|
|
||||||
userId: string,
|
|
||||||
achievement: any,
|
|
||||||
): Promise<number> {
|
|
||||||
const { code } = achievement;
|
|
||||||
|
|
||||||
// Recordings achievements
|
|
||||||
if (code === "first_recording" || code === "recording_10" || code === "recording_50" || code === "recording_100") {
|
|
||||||
const result = await database("sexy_recordings")
|
|
||||||
.where({ user_created: userId, status: "published" })
|
|
||||||
.count("* as count");
|
|
||||||
return result[0]?.count || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Featured recording
|
|
||||||
if (code === "featured_recording") {
|
|
||||||
const result = await database("sexy_recordings")
|
|
||||||
.where({ user_created: userId, status: "published", featured: true })
|
|
||||||
.count("* as count");
|
|
||||||
return result[0]?.count || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Playback achievements (excluding own recordings)
|
|
||||||
if (code === "first_play" || code === "play_100" || code === "play_500") {
|
|
||||||
const result = await database("sexy_recording_plays as rp")
|
|
||||||
.leftJoin("sexy_recordings as r", "rp.recording_id", "r.id")
|
|
||||||
.where({ "rp.user_id": userId })
|
|
||||||
.where("r.user_created", "!=", userId)
|
|
||||||
.count("* as count");
|
|
||||||
return result[0]?.count || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Completionist achievements
|
|
||||||
if (code === "completionist_10" || code === "completionist_100") {
|
|
||||||
const result = await database("sexy_recording_plays")
|
|
||||||
.where({ user_id: userId, completed: true })
|
|
||||||
.count("* as count");
|
|
||||||
return result[0]?.count || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Social achievements
|
|
||||||
if (code === "first_comment" || code === "comment_50" || code === "comment_250") {
|
|
||||||
const result = await database("comments")
|
|
||||||
.where({ user_created: userId, collection: "sexy_recordings" })
|
|
||||||
.count("* as count");
|
|
||||||
return result[0]?.count || 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special: Early adopter (joined in first month)
|
|
||||||
if (code === "early_adopter") {
|
|
||||||
const user = await database("directus_users")
|
|
||||||
.where({ id: userId })
|
|
||||||
.first();
|
|
||||||
|
|
||||||
if (user) {
|
|
||||||
const joinDate = new Date(user.date_created);
|
|
||||||
const platformLaunch = new Date("2025-01-01"); // Adjust to actual launch date
|
|
||||||
const oneMonthAfterLaunch = new Date(platformLaunch);
|
|
||||||
oneMonthAfterLaunch.setMonth(oneMonthAfterLaunch.getMonth() + 1);
|
|
||||||
|
|
||||||
return joinDate <= oneMonthAfterLaunch ? 1 : 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special: One year anniversary
|
|
||||||
if (code === "one_year") {
|
|
||||||
const user = await database("directus_users")
|
|
||||||
.where({ id: userId })
|
|
||||||
.first();
|
|
||||||
|
|
||||||
if (user) {
|
|
||||||
const joinDate = new Date(user.date_created);
|
|
||||||
const oneYearAgo = new Date();
|
|
||||||
oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1);
|
|
||||||
|
|
||||||
return joinDate <= oneYearAgo ? 1 : 0;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special: Balanced creator (50 recordings + 100 plays)
|
|
||||||
if (code === "balanced_creator") {
|
|
||||||
const recordings = await database("sexy_recordings")
|
|
||||||
.where({ user_created: userId, status: "published" })
|
|
||||||
.count("* as count");
|
|
||||||
const plays = await database("sexy_recording_plays as rp")
|
|
||||||
.leftJoin("sexy_recordings as r", "rp.recording_id", "r.id")
|
|
||||||
.where({ "rp.user_id": userId })
|
|
||||||
.where("r.user_created", "!=", userId)
|
|
||||||
.count("* as count");
|
|
||||||
|
|
||||||
const recordingsCount = recordings[0]?.count || 0;
|
|
||||||
const playsCount = plays[0]?.count || 0;
|
|
||||||
|
|
||||||
return (recordingsCount >= 50 && playsCount >= 100) ? 1 : 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Special: Top 10 rank
|
|
||||||
if (code === "top_10_rank") {
|
|
||||||
const userStats = await database("sexy_user_stats")
|
|
||||||
.where({ user_id: userId })
|
|
||||||
.first();
|
|
||||||
|
|
||||||
if (!userStats) return 0;
|
|
||||||
|
|
||||||
const rank = await database("sexy_user_stats")
|
|
||||||
.where("total_weighted_points", ">", userStats.total_weighted_points)
|
|
||||||
.count("* as count");
|
|
||||||
|
|
||||||
const userRank = (rank[0]?.count || 0) + 1;
|
|
||||||
return userRank <= 10 ? 1 : 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
return 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Recalculate all weighted scores (for cron job)
|
|
||||||
*/
|
|
||||||
export async function recalculateAllWeightedScores(database: Knex): Promise<void> {
|
|
||||||
const users = await database("sexy_user_stats").select("user_id");
|
|
||||||
|
|
||||||
for (const user of users) {
|
|
||||||
await updateUserStats(database, user.user_id);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,145 +0,0 @@
|
|||||||
import { createRequire } from "module";
|
|
||||||
global.require = createRequire(import.meta.url);
|
|
||||||
import { defineHook } from "@directus/extensions-sdk";
|
|
||||||
import slugify from "@sindresorhus/slugify";
|
|
||||||
import ffmpeg from "fluent-ffmpeg";
|
|
||||||
import { awardPoints, checkAchievements } from "../endpoint/gamification.js";
|
|
||||||
|
|
||||||
async function processVideo(
|
|
||||||
meta,
|
|
||||||
{ schema, accountability },
|
|
||||||
services,
|
|
||||||
logger,
|
|
||||||
) {
|
|
||||||
const { FilesService } = services;
|
|
||||||
const itemId = meta.key;
|
|
||||||
const videoPath = `/directus/uploads/${meta.payload.filename_disk}`; // Adjust path as needed
|
|
||||||
const videoService = new FilesService({ schema, accountability }); // Replace with your collection name
|
|
||||||
|
|
||||||
try {
|
|
||||||
const durationInSeconds = await new Promise((resolve, reject) => {
|
|
||||||
ffmpeg.ffprobe(videoPath, function (err, metadata) {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
}
|
|
||||||
resolve(parseInt(metadata.format.duration));
|
|
||||||
});
|
|
||||||
});
|
|
||||||
// Update the item with the duration
|
|
||||||
await videoService.updateOne(itemId, { duration: durationInSeconds });
|
|
||||||
logger.info(`Video ${itemId} duration updated to ${durationInSeconds}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.error(`Error processing video ${itemId}:`, error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export default defineHook(async ({ filter, action }, { services, logger, database, getSchema }) => {
|
|
||||||
action("files.upload", async (meta, context) => {
|
|
||||||
await processVideo(meta, context, services, logger);
|
|
||||||
});
|
|
||||||
|
|
||||||
filter(
|
|
||||||
"users.create",
|
|
||||||
(payload: {
|
|
||||||
first_name: string;
|
|
||||||
last_name: string;
|
|
||||||
artist_name: string;
|
|
||||||
slug: string;
|
|
||||||
}) => {
|
|
||||||
const artist_name = `${payload.first_name}-${new Date().getTime()}`;
|
|
||||||
const slug = slugify(artist_name);
|
|
||||||
const join_date = new Date();
|
|
||||||
return { ...payload, artist_name, slug, join_date };
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
filter(
|
|
||||||
"users.update",
|
|
||||||
(payload: {
|
|
||||||
first_name: string;
|
|
||||||
last_name: string;
|
|
||||||
artist_name: string;
|
|
||||||
slug: string;
|
|
||||||
}) => {
|
|
||||||
if (payload.artist_name) {
|
|
||||||
const slug = slugify(payload.artist_name);
|
|
||||||
return { ...payload, slug };
|
|
||||||
}
|
|
||||||
return payload;
|
|
||||||
},
|
|
||||||
);
|
|
||||||
|
|
||||||
// =========================================
|
|
||||||
// GAMIFICATION HOOKS
|
|
||||||
// =========================================
|
|
||||||
|
|
||||||
// Hook: Award points when recording is published
|
|
||||||
action("items.create", async (meta, { collection, accountability }) => {
|
|
||||||
if (collection === "sexy_recordings") {
|
|
||||||
const { payload, key } = meta;
|
|
||||||
|
|
||||||
// Award points if recording is published
|
|
||||||
if (payload.status === "published" && accountability?.user) {
|
|
||||||
try {
|
|
||||||
await awardPoints(database, accountability.user, "RECORDING_CREATE", key);
|
|
||||||
await checkAchievements(database, accountability.user, "recordings");
|
|
||||||
logger.info(`Awarded RECORDING_CREATE points to user ${accountability.user}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.error("Failed to award recording creation points:", error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Hook: Award points when recording status changes to published or featured
|
|
||||||
action("items.update", async (meta, { collection, accountability, schema }) => {
|
|
||||||
if (collection === "sexy_recordings") {
|
|
||||||
const { payload, keys } = meta;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { ItemsService } = services;
|
|
||||||
const recordingsService = new ItemsService("sexy_recordings", {
|
|
||||||
schema: await getSchema(),
|
|
||||||
});
|
|
||||||
|
|
||||||
for (const key of keys) {
|
|
||||||
const recording = await recordingsService.readOne(key);
|
|
||||||
|
|
||||||
// Award points if status changed from non-published to published
|
|
||||||
if (payload.status === "published" && recording.status !== "published" && recording.user_created) {
|
|
||||||
await awardPoints(database, recording.user_created, "RECORDING_CREATE", key);
|
|
||||||
await checkAchievements(database, recording.user_created, "recordings");
|
|
||||||
logger.info(`Awarded RECORDING_CREATE points to user ${recording.user_created}`);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Award bonus points if recording becomes featured
|
|
||||||
if (payload.featured === true && !recording.featured && recording.user_created) {
|
|
||||||
await awardPoints(database, recording.user_created, "RECORDING_FEATURED", key);
|
|
||||||
await checkAchievements(database, recording.user_created, "recordings");
|
|
||||||
logger.info(`Awarded RECORDING_FEATURED points to user ${recording.user_created}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error("Failed to award recording update points:", error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Hook: Award points when user creates a comment on a recording
|
|
||||||
action("comments.create", async (meta, { accountability }) => {
|
|
||||||
if (!accountability?.user) return;
|
|
||||||
|
|
||||||
try {
|
|
||||||
const { payload } = meta;
|
|
||||||
|
|
||||||
// Check if comment is on a recording
|
|
||||||
if (payload.collection === "sexy_recordings") {
|
|
||||||
await awardPoints(database, accountability.user, "COMMENT_CREATE");
|
|
||||||
await checkAchievements(database, accountability.user, "social");
|
|
||||||
logger.info(`Awarded COMMENT_CREATE points to user ${accountability.user}`);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
logger.error("Failed to award comment points:", error);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
});
|
|
||||||
@@ -1,130 +0,0 @@
|
|||||||
import { defineTheme } from "@directus/extensions-sdk";
|
|
||||||
import "./style.css";
|
|
||||||
|
|
||||||
export default defineTheme({
|
|
||||||
id: "@sexy.pivoine.art/theme",
|
|
||||||
name: "Sexy.Art Dark",
|
|
||||||
appearance: "dark",
|
|
||||||
rules: {
|
|
||||||
borderRadius: "6px",
|
|
||||||
borderWidth: "2px",
|
|
||||||
foreground: "#c9d1d9",
|
|
||||||
foregroundSubdued: "#666672",
|
|
||||||
foregroundAccent: "#f0f6fc",
|
|
||||||
background: "#0D1117",
|
|
||||||
backgroundNormal: "#21262E",
|
|
||||||
backgroundAccent: "#30363D",
|
|
||||||
backgroundSubdued: "#161B22",
|
|
||||||
borderColor: "#21262E",
|
|
||||||
borderColorAccent: "#30363D",
|
|
||||||
borderColorSubdued: "#161B22",
|
|
||||||
primary: "#ce47eb",
|
|
||||||
secondary: "#613dff",
|
|
||||||
success: "#87ff66",
|
|
||||||
warning: "#ffbf66",
|
|
||||||
danger: "#ff6467",
|
|
||||||
navigation: {
|
|
||||||
background: "#21262E",
|
|
||||||
backgroundAccent: "#30363D",
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
project: {
|
|
||||||
background: "#30363D",
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
},
|
|
||||||
modules: {
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
button: {
|
|
||||||
foregroundHover: "#fff",
|
|
||||||
background: "transparent",
|
|
||||||
backgroundHover: "transparent",
|
|
||||||
backgroundActive: "#21262E",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
list: {
|
|
||||||
background: "transparent",
|
|
||||||
backgroundHover: "#30363D",
|
|
||||||
backgroundActive: "#30363D",
|
|
||||||
divider: {
|
|
||||||
borderColor: "#30363D",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
header: {
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
boxShadow: "0 4px 7px -4px black",
|
|
||||||
},
|
|
||||||
form: {
|
|
||||||
columnGap: "32px",
|
|
||||||
rowGap: "40px",
|
|
||||||
field: {
|
|
||||||
label: {
|
|
||||||
fontWeight: "600",
|
|
||||||
},
|
|
||||||
input: {
|
|
||||||
borderColor: "#21262E",
|
|
||||||
borderColorHover: "#30363D",
|
|
||||||
boxShadow: "none",
|
|
||||||
boxShadowHover: "none",
|
|
||||||
height: "60px",
|
|
||||||
padding: "16px",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
sidebar: {
|
|
||||||
background: "#21262E",
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
section: {
|
|
||||||
toggle: {
|
|
||||||
background: "#30363D",
|
|
||||||
borderWidth: "0px",
|
|
||||||
borderColor: "transparent",
|
|
||||||
},
|
|
||||||
form: {
|
|
||||||
field: {
|
|
||||||
input: {
|
|
||||||
height: "52px",
|
|
||||||
padding: "12px",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
public: {
|
|
||||||
art: {
|
|
||||||
background: "#21262E",
|
|
||||||
speed: "1",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
popover: {
|
|
||||||
menu: {
|
|
||||||
background: "#30363D",
|
|
||||||
boxShadow: "0px 0px 6px 0px black",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
banner: {
|
|
||||||
background: "#161B22",
|
|
||||||
padding: "40px",
|
|
||||||
avatar: {
|
|
||||||
background: "#fff",
|
|
||||||
borderRadius: "50%",
|
|
||||||
},
|
|
||||||
headline: {
|
|
||||||
foreground: "#fff",
|
|
||||||
},
|
|
||||||
title: {
|
|
||||||
foreground: "#fff",
|
|
||||||
},
|
|
||||||
subtitle: {
|
|
||||||
foreground: "#969696",
|
|
||||||
},
|
|
||||||
art: {
|
|
||||||
foreground: "#21262E",
|
|
||||||
},
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
{
|
|
||||||
"compilerOptions": {
|
|
||||||
"target": "ES2022",
|
|
||||||
"lib": ["ES2022", "DOM"],
|
|
||||||
"module": "ES2022",
|
|
||||||
"moduleResolution": "node",
|
|
||||||
"strict": false,
|
|
||||||
"noFallthroughCasesInSwitch": true,
|
|
||||||
"esModuleInterop": true,
|
|
||||||
"noImplicitAny": false,
|
|
||||||
"noImplicitThis": true,
|
|
||||||
"noImplicitReturns": true,
|
|
||||||
"noUnusedLocals": true,
|
|
||||||
"noUncheckedIndexedAccess": true,
|
|
||||||
"noUnusedParameters": true,
|
|
||||||
"alwaysStrict": true,
|
|
||||||
"strictNullChecks": true,
|
|
||||||
"strictFunctionTypes": true,
|
|
||||||
"strictBindCallApply": true,
|
|
||||||
"strictPropertyInitialization": true,
|
|
||||||
"resolveJsonModule": false,
|
|
||||||
"skipLibCheck": true,
|
|
||||||
"forceConsistentCasingInFileNames": true,
|
|
||||||
"allowSyntheticDefaultImports": true,
|
|
||||||
"isolatedModules": true,
|
|
||||||
"allowJs": true
|
|
||||||
},
|
|
||||||
"include": ["./src/**/*.ts"]
|
|
||||||
}
|
|
||||||
53
packages/buttplug/Cargo.lock
generated
53
packages/buttplug/Cargo.lock
generated
@@ -177,7 +177,7 @@ checksum = "46c5e41b57b8bba42a04676d81cb89e9ee8e859a1a66f80a5a72e1cb76b34d43"
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "buttplug_core"
|
name = "buttplug_core"
|
||||||
version = "10.0.0"
|
version = "10.0.0"
|
||||||
source = "git+https://github.com/valknarthing/buttplug.git#c569409c51ad15f343c3f97a57711cdaa358f2ea"
|
source = "git+https://github.com/valknarthing/buttplug.git?rev=fad6c9d#fad6c9d97895218b01ceb55fd4a872a89043194a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"async-stream",
|
"async-stream",
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
@@ -203,7 +203,7 @@ dependencies = [
|
|||||||
[[package]]
|
[[package]]
|
||||||
name = "buttplug_server"
|
name = "buttplug_server"
|
||||||
version = "10.0.0"
|
version = "10.0.0"
|
||||||
source = "git+https://github.com/valknarthing/buttplug.git#c569409c51ad15f343c3f97a57711cdaa358f2ea"
|
source = "git+https://github.com/valknarthing/buttplug.git?rev=fad6c9d#fad6c9d97895218b01ceb55fd4a872a89043194a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"aes",
|
"aes",
|
||||||
"async-trait",
|
"async-trait",
|
||||||
@@ -243,8 +243,8 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "buttplug_server_device_config"
|
name = "buttplug_server_device_config"
|
||||||
version = "10.0.0"
|
version = "10.0.1"
|
||||||
source = "git+https://github.com/valknarthing/buttplug.git#c569409c51ad15f343c3f97a57711cdaa358f2ea"
|
source = "git+https://github.com/valknarthing/buttplug.git?rev=fad6c9d#fad6c9d97895218b01ceb55fd4a872a89043194a"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"buttplug_core",
|
"buttplug_core",
|
||||||
"dashmap",
|
"dashmap",
|
||||||
@@ -913,9 +913,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c"
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "js-sys"
|
name = "js-sys"
|
||||||
version = "0.3.80"
|
version = "0.3.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "852f13bec5eba4ba9afbeb93fd7c13fe56147f055939ae21c43a29a0ecb2702e"
|
checksum = "93f0862381daaec758576dcc22eb7bbf4d7efd67328553f3b45a412a51a3fb21"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
@@ -1860,9 +1860,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen"
|
name = "wasm-bindgen"
|
||||||
version = "0.2.103"
|
version = "0.2.110"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ab10a69fbd0a177f5f649ad4d8d3305499c42bab9aef2f7ff592d0ec8f833819"
|
checksum = "1de241cdc66a9d91bd84f097039eb140cdc6eec47e0cdbaf9d932a1dd6c35866"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
@@ -1873,27 +1873,14 @@ dependencies = [
|
|||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
|
||||||
name = "wasm-bindgen-backend"
|
|
||||||
version = "0.2.103"
|
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
|
||||||
checksum = "0bb702423545a6007bbc368fde243ba47ca275e549c8a28617f56f6ba53b1d1c"
|
|
||||||
dependencies = [
|
|
||||||
"bumpalo",
|
|
||||||
"log",
|
|
||||||
"proc-macro2",
|
|
||||||
"quote",
|
|
||||||
"syn",
|
|
||||||
"wasm-bindgen-shared",
|
|
||||||
]
|
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-futures"
|
name = "wasm-bindgen-futures"
|
||||||
version = "0.4.53"
|
version = "0.4.60"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "a0b221ff421256839509adbb55998214a70d829d3a28c69b4a6672e9d2a42f67"
|
checksum = "a42e96ea38f49b191e08a1bab66c7ffdba24b06f9995b39a9dd60222e5b6f1da"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"cfg-if",
|
"cfg-if",
|
||||||
|
"futures-util",
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"once_cell",
|
"once_cell",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
@@ -1902,9 +1889,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro"
|
name = "wasm-bindgen-macro"
|
||||||
version = "0.2.103"
|
version = "0.2.110"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fc65f4f411d91494355917b605e1480033152658d71f722a90647f56a70c88a0"
|
checksum = "e12fdf6649048f2e3de6d7d5ff3ced779cdedee0e0baffd7dff5cdfa3abc8a52"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"quote",
|
"quote",
|
||||||
"wasm-bindgen-macro-support",
|
"wasm-bindgen-macro-support",
|
||||||
@@ -1912,22 +1899,22 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-macro-support"
|
name = "wasm-bindgen-macro-support"
|
||||||
version = "0.2.103"
|
version = "0.2.110"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "ffc003a991398a8ee604a401e194b6b3a39677b3173d6e74495eb51b82e99a32"
|
checksum = "0e63d1795c565ac3462334c1e396fd46dbf481c40f51f5072c310717bc4fb309"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
|
"bumpalo",
|
||||||
"proc-macro2",
|
"proc-macro2",
|
||||||
"quote",
|
"quote",
|
||||||
"syn",
|
"syn",
|
||||||
"wasm-bindgen-backend",
|
|
||||||
"wasm-bindgen-shared",
|
"wasm-bindgen-shared",
|
||||||
]
|
]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "wasm-bindgen-shared"
|
name = "wasm-bindgen-shared"
|
||||||
version = "0.2.103"
|
version = "0.2.110"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "293c37f4efa430ca14db3721dfbe48d8c33308096bd44d80ebaa775ab71ba1cf"
|
checksum = "e9f9cdac23a5ce71f6bf9f8824898a501e511892791ea2a0c6b8568c68b9cb53"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"unicode-ident",
|
"unicode-ident",
|
||||||
]
|
]
|
||||||
@@ -1948,9 +1935,9 @@ dependencies = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "web-sys"
|
name = "web-sys"
|
||||||
version = "0.3.80"
|
version = "0.3.87"
|
||||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||||
checksum = "fbe734895e869dc429d78c4b433f8d17d95f8d05317440b4fad5ab2d33e596dc"
|
checksum = "f2c7c5718134e770ee62af3b6b4a84518ec10101aad610c024b64d6ff29bb1ff"
|
||||||
dependencies = [
|
dependencies = [
|
||||||
"js-sys",
|
"js-sys",
|
||||||
"wasm-bindgen",
|
"wasm-bindgen",
|
||||||
|
|||||||
@@ -16,15 +16,15 @@ name = "buttplug_wasm"
|
|||||||
path = "src/lib.rs"
|
path = "src/lib.rs"
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
buttplug_core = { git = "https://github.com/valknarthing/buttplug.git", default-features = false, features = ["wasm"] }
|
buttplug_core = { git = "https://github.com/valknarthing/buttplug.git", rev = "fad6c9d", default-features = false, features = ["wasm"] }
|
||||||
buttplug_server = { git = "https://github.com/valknarthing/buttplug.git", default-features = false, features = ["wasm"] }
|
buttplug_server = { git = "https://github.com/valknarthing/buttplug.git", rev = "fad6c9d", default-features = false, features = ["wasm"] }
|
||||||
buttplug_server_device_config = { git = "https://github.com/valknarthing/buttplug.git" }
|
buttplug_server_device_config = { git = "https://github.com/valknarthing/buttplug.git", rev = "fad6c9d" }
|
||||||
js-sys = "0.3.80"
|
js-sys = "0.3.87"
|
||||||
tracing-wasm = "0.2.1"
|
tracing-wasm = "0.2.1"
|
||||||
log-panics = { version = "2.1.0", features = ["with-backtrace"] }
|
log-panics = { version = "2.1.0", features = ["with-backtrace"] }
|
||||||
console_error_panic_hook = "0.1.7"
|
console_error_panic_hook = "0.1.7"
|
||||||
wasmtimer = "0.4.3"
|
wasmtimer = "0.4.3"
|
||||||
wasm-bindgen = { version = "0.2.103", features = ["serde-serialize"] }
|
wasm-bindgen = { version = "0.2.110", features = ["serde-serialize"] }
|
||||||
tokio = { version = "1.47.1", features = ["sync", "macros", "io-util"] }
|
tokio = { version = "1.47.1", features = ["sync", "macros", "io-util"] }
|
||||||
tokio-stream = "0.1.17"
|
tokio-stream = "0.1.17"
|
||||||
tracing = "0.1.41"
|
tracing = "0.1.41"
|
||||||
@@ -33,12 +33,12 @@ tracing-subscriber = { version = "0.3.20", features = ["json"] }
|
|||||||
futures = "0.3.31"
|
futures = "0.3.31"
|
||||||
futures-util = "0.3.31"
|
futures-util = "0.3.31"
|
||||||
async-trait = "0.1.89"
|
async-trait = "0.1.89"
|
||||||
wasm-bindgen-futures = "0.4.53"
|
wasm-bindgen-futures = "0.4.60"
|
||||||
getrandom = { version = "0.3", features = ["wasm_js"] }
|
getrandom = { version = "0.3", features = ["wasm_js"] }
|
||||||
parking_lot = { version = "0.11.1", features = ["wasm-bindgen"]}
|
parking_lot = { version = "0.11.1", features = ["wasm-bindgen"]}
|
||||||
|
|
||||||
[dependencies.web-sys]
|
[dependencies.web-sys]
|
||||||
version = "0.3.80"
|
version = "0.3.87"
|
||||||
# path = "../../wasm-bindgen/crates/web-sys"
|
# path = "../../wasm-bindgen/crates/web-sys"
|
||||||
#git = "https://github.com/rustwasm/wasm-bindgen"
|
#git = "https://github.com/rustwasm/wasm-bindgen"
|
||||||
features = [
|
features = [
|
||||||
|
|||||||
@@ -1,25 +1,25 @@
|
|||||||
{
|
{
|
||||||
"name": "@sexy.pivoine.art/buttplug",
|
"name": "@sexy.pivoine.art/buttplug",
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"main": "./dist/index.js",
|
"main": "./dist/index.js",
|
||||||
"module": "./dist/index.js",
|
"module": "./dist/index.js",
|
||||||
"types": "./dist/index.d.ts",
|
"types": "./dist/index.d.ts",
|
||||||
"files": [
|
"files": [
|
||||||
"dist"
|
"dist"
|
||||||
],
|
],
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"build": "vite build",
|
"build": "vite build",
|
||||||
"build:wasm": "wasm-pack build --out-dir wasm --out-name index --target bundler --release"
|
"build:wasm": "wasm-pack build --out-dir wasm --out-name index --target bundler --release"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"eventemitter3": "^5.0.1",
|
"eventemitter3": "^5.0.4",
|
||||||
"typescript": "^5.9.2",
|
"typescript": "^5.9.3",
|
||||||
"vite": "^7.1.4",
|
"vite": "^7.3.1",
|
||||||
"vite-plugin-wasm": "3.5.0",
|
"vite-plugin-wasm": "3.5.0",
|
||||||
"ws": "^8.18.3"
|
"ws": "^8.19.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"wasm-pack": "^0.13.1"
|
"wasm-pack": "^0.14.0"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,11 +6,11 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
'use strict';
|
"use strict";
|
||||||
|
|
||||||
import { IButtplugClientConnector } from './IButtplugClientConnector';
|
import { IButtplugClientConnector } from "./IButtplugClientConnector";
|
||||||
import { ButtplugMessage } from '../core/Messages';
|
import { ButtplugMessage } from "../core/Messages";
|
||||||
import { ButtplugBrowserWebsocketConnector } from '../utils/ButtplugBrowserWebsocketConnector';
|
import { ButtplugBrowserWebsocketConnector } from "../utils/ButtplugBrowserWebsocketConnector";
|
||||||
|
|
||||||
export class ButtplugBrowserWebsocketClientConnector
|
export class ButtplugBrowserWebsocketClientConnector
|
||||||
extends ButtplugBrowserWebsocketConnector
|
extends ButtplugBrowserWebsocketConnector
|
||||||
@@ -18,7 +18,7 @@ export class ButtplugBrowserWebsocketClientConnector
|
|||||||
{
|
{
|
||||||
public send = (msg: ButtplugMessage): void => {
|
public send = (msg: ButtplugMessage): void => {
|
||||||
if (!this.Connected) {
|
if (!this.Connected) {
|
||||||
throw new Error('ButtplugClient not connected');
|
throw new Error("ButtplugClient not connected");
|
||||||
}
|
}
|
||||||
this.sendMessage(msg);
|
this.sendMessage(msg);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -6,20 +6,16 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
'use strict';
|
"use strict";
|
||||||
|
|
||||||
import { ButtplugLogger } from '../core/Logging';
|
import { ButtplugLogger } from "../core/Logging";
|
||||||
import { EventEmitter } from 'eventemitter3';
|
import { EventEmitter } from "eventemitter3";
|
||||||
import { ButtplugClientDevice } from './ButtplugClientDevice';
|
import { ButtplugClientDevice } from "./ButtplugClientDevice";
|
||||||
import { IButtplugClientConnector } from './IButtplugClientConnector';
|
import { IButtplugClientConnector } from "./IButtplugClientConnector";
|
||||||
import { ButtplugMessageSorter } from '../utils/ButtplugMessageSorter';
|
import { ButtplugMessageSorter } from "../utils/ButtplugMessageSorter";
|
||||||
import * as Messages from '../core/Messages';
|
import * as Messages from "../core/Messages";
|
||||||
import {
|
import { ButtplugError, ButtplugInitError, ButtplugMessageError } from "../core/Exceptions";
|
||||||
ButtplugError,
|
import { ButtplugClientConnectorException } from "./ButtplugClientConnectorException";
|
||||||
ButtplugInitError,
|
|
||||||
ButtplugMessageError,
|
|
||||||
} from '../core/Exceptions';
|
|
||||||
import { ButtplugClientConnectorException } from './ButtplugClientConnectorException';
|
|
||||||
|
|
||||||
export class ButtplugClient extends EventEmitter {
|
export class ButtplugClient extends EventEmitter {
|
||||||
protected _pingTimer: NodeJS.Timeout | null = null;
|
protected _pingTimer: NodeJS.Timeout | null = null;
|
||||||
@@ -30,7 +26,7 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
protected _isScanning = false;
|
protected _isScanning = false;
|
||||||
private _sorter: ButtplugMessageSorter = new ButtplugMessageSorter(true);
|
private _sorter: ButtplugMessageSorter = new ButtplugMessageSorter(true);
|
||||||
|
|
||||||
constructor(clientName = 'Generic Buttplug Client') {
|
constructor(clientName = "Generic Buttplug Client") {
|
||||||
super();
|
super();
|
||||||
this._clientName = clientName;
|
this._clientName = clientName;
|
||||||
this._logger.Debug(`ButtplugClient: Client ${clientName} created.`);
|
this._logger.Debug(`ButtplugClient: Client ${clientName} created.`);
|
||||||
@@ -52,18 +48,16 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public connect = async (connector: IButtplugClientConnector) => {
|
public connect = async (connector: IButtplugClientConnector) => {
|
||||||
this._logger.Info(
|
this._logger.Info(`ButtplugClient: Connecting using ${connector.constructor.name}`);
|
||||||
`ButtplugClient: Connecting using ${connector.constructor.name}`
|
|
||||||
);
|
|
||||||
await connector.connect();
|
await connector.connect();
|
||||||
this._connector = connector;
|
this._connector = connector;
|
||||||
this._connector.addListener('message', this.parseMessages);
|
this._connector.addListener("message", this.parseMessages);
|
||||||
this._connector.addListener('disconnect', this.disconnectHandler);
|
this._connector.addListener("disconnect", this.disconnectHandler);
|
||||||
await this.initializeConnection();
|
await this.initializeConnection();
|
||||||
};
|
};
|
||||||
|
|
||||||
public disconnect = async () => {
|
public disconnect = async () => {
|
||||||
this._logger.Debug('ButtplugClient: Disconnect called');
|
this._logger.Debug("ButtplugClient: Disconnect called");
|
||||||
this._devices.clear();
|
this._devices.clear();
|
||||||
this.checkConnector();
|
this.checkConnector();
|
||||||
await this.shutdownConnection();
|
await this.shutdownConnection();
|
||||||
@@ -71,25 +65,33 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
};
|
};
|
||||||
|
|
||||||
public startScanning = async () => {
|
public startScanning = async () => {
|
||||||
this._logger.Debug('ButtplugClient: StartScanning called');
|
this._logger.Debug("ButtplugClient: StartScanning called");
|
||||||
this._isScanning = true;
|
this._isScanning = true;
|
||||||
await this.sendMsgExpectOk({ StartScanning: { Id: 1 } });
|
await this.sendMsgExpectOk({ StartScanning: { Id: 1 } });
|
||||||
};
|
};
|
||||||
|
|
||||||
public stopScanning = async () => {
|
public stopScanning = async () => {
|
||||||
this._logger.Debug('ButtplugClient: StopScanning called');
|
this._logger.Debug("ButtplugClient: StopScanning called");
|
||||||
this._isScanning = false;
|
this._isScanning = false;
|
||||||
await this.sendMsgExpectOk({ StopScanning: { Id: 1 } });
|
await this.sendMsgExpectOk({ StopScanning: { Id: 1 } });
|
||||||
};
|
};
|
||||||
|
|
||||||
public stopAllDevices = async () => {
|
public stopAllDevices = async () => {
|
||||||
this._logger.Debug('ButtplugClient: StopAllDevices');
|
this._logger.Debug("ButtplugClient: StopAllDevices");
|
||||||
await this.sendMsgExpectOk({ StopCmd: { Id: 1, DeviceIndex: undefined, FeatureIndex: undefined, Inputs: true, Outputs: true } });
|
await this.sendMsgExpectOk({
|
||||||
|
StopCmd: {
|
||||||
|
Id: 1,
|
||||||
|
DeviceIndex: undefined,
|
||||||
|
FeatureIndex: undefined,
|
||||||
|
Inputs: true,
|
||||||
|
Outputs: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
protected disconnectHandler = () => {
|
protected disconnectHandler = () => {
|
||||||
this._logger.Info('ButtplugClient: Disconnect event receieved.');
|
this._logger.Info("ButtplugClient: Disconnect event receieved.");
|
||||||
this.emit('disconnect');
|
this.emit("disconnect");
|
||||||
};
|
};
|
||||||
|
|
||||||
protected parseMessages = (msgs: Messages.ButtplugMessage[]) => {
|
protected parseMessages = (msgs: Messages.ButtplugMessage[]) => {
|
||||||
@@ -100,10 +102,10 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
break;
|
break;
|
||||||
} else if (x.ScanningFinished !== undefined) {
|
} else if (x.ScanningFinished !== undefined) {
|
||||||
this._isScanning = false;
|
this._isScanning = false;
|
||||||
this.emit('scanningfinished', x);
|
this.emit("scanningfinished", x);
|
||||||
} else if (x.InputReading !== undefined) {
|
} else if (x.InputReading !== undefined) {
|
||||||
// TODO this should be emitted from the device or feature, not the client
|
// TODO this should be emitted from the device or feature, not the client
|
||||||
this.emit('inputreading', x);
|
this.emit("inputreading", x);
|
||||||
} else {
|
} else {
|
||||||
console.log(`Unhandled message: ${x}`);
|
console.log(`Unhandled message: ${x}`);
|
||||||
}
|
}
|
||||||
@@ -112,21 +114,17 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
|
|
||||||
protected initializeConnection = async (): Promise<boolean> => {
|
protected initializeConnection = async (): Promise<boolean> => {
|
||||||
this.checkConnector();
|
this.checkConnector();
|
||||||
const msg = await this.sendMessage(
|
const msg = await this.sendMessage({
|
||||||
{
|
RequestServerInfo: {
|
||||||
RequestServerInfo: {
|
ClientName: this._clientName,
|
||||||
ClientName: this._clientName,
|
Id: 1,
|
||||||
Id: 1,
|
ProtocolVersionMajor: Messages.MESSAGE_SPEC_VERSION_MAJOR,
|
||||||
ProtocolVersionMajor: Messages.MESSAGE_SPEC_VERSION_MAJOR,
|
ProtocolVersionMinor: Messages.MESSAGE_SPEC_VERSION_MINOR,
|
||||||
ProtocolVersionMinor: Messages.MESSAGE_SPEC_VERSION_MINOR
|
},
|
||||||
}
|
});
|
||||||
}
|
|
||||||
);
|
|
||||||
if (msg.ServerInfo !== undefined) {
|
if (msg.ServerInfo !== undefined) {
|
||||||
const serverinfo = msg as Messages.ServerInfo;
|
const serverinfo = msg as Messages.ServerInfo;
|
||||||
this._logger.Info(
|
this._logger.Info(`ButtplugClient: Connected to Server ${serverinfo.ServerName}`);
|
||||||
`ButtplugClient: Connected to Server ${serverinfo.ServerName}`
|
|
||||||
);
|
|
||||||
// TODO: maybe store server name, do something with message template version?
|
// TODO: maybe store server name, do something with message template version?
|
||||||
const ping = serverinfo.MaxPingTime;
|
const ping = serverinfo.MaxPingTime;
|
||||||
// If the server version is lower than the client version, the server will disconnect here.
|
// If the server version is lower than the client version, the server will disconnect here.
|
||||||
@@ -153,22 +151,19 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
throw ButtplugError.LogAndError(
|
throw ButtplugError.LogAndError(
|
||||||
ButtplugInitError,
|
ButtplugInitError,
|
||||||
this._logger,
|
this._logger,
|
||||||
`Cannot connect to server. ${err.ErrorMessage}`
|
`Cannot connect to server. ${err.ErrorMessage}`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
return false;
|
return false;
|
||||||
}
|
};
|
||||||
|
|
||||||
private parseDeviceList = (list: Messages.DeviceList) => {
|
private parseDeviceList = (list: Messages.DeviceList) => {
|
||||||
for (let [_, d] of Object.entries(list.Devices)) {
|
for (let [_, d] of Object.entries(list.Devices)) {
|
||||||
if (!this._devices.has(d.DeviceIndex)) {
|
if (!this._devices.has(d.DeviceIndex)) {
|
||||||
const device = ButtplugClientDevice.fromMsg(
|
const device = ButtplugClientDevice.fromMsg(d, this.sendMessageClosure);
|
||||||
d,
|
|
||||||
this.sendMessageClosure
|
|
||||||
);
|
|
||||||
this._logger.Debug(`ButtplugClient: Adding Device: ${device}`);
|
this._logger.Debug(`ButtplugClient: Adding Device: ${device}`);
|
||||||
this._devices.set(d.DeviceIndex, device);
|
this._devices.set(d.DeviceIndex, device);
|
||||||
this.emit('deviceadded', device);
|
this.emit("deviceadded", device);
|
||||||
} else {
|
} else {
|
||||||
this._logger.Debug(`ButtplugClient: Device already added: ${d}`);
|
this._logger.Debug(`ButtplugClient: Device already added: ${d}`);
|
||||||
}
|
}
|
||||||
@@ -176,19 +171,17 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
for (let [index, device] of this._devices.entries()) {
|
for (let [index, device] of this._devices.entries()) {
|
||||||
if (!list.Devices.hasOwnProperty(index.toString())) {
|
if (!list.Devices.hasOwnProperty(index.toString())) {
|
||||||
this._devices.delete(index);
|
this._devices.delete(index);
|
||||||
this.emit('deviceremoved', device);
|
this.emit("deviceremoved", device);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
protected requestDeviceList = async () => {
|
protected requestDeviceList = async () => {
|
||||||
this.checkConnector();
|
this.checkConnector();
|
||||||
this._logger.Debug('ButtplugClient: ReceiveDeviceList called');
|
this._logger.Debug("ButtplugClient: ReceiveDeviceList called");
|
||||||
const response = (await this.sendMessage(
|
const response = await this.sendMessage({
|
||||||
{
|
RequestDeviceList: { Id: 1 },
|
||||||
RequestDeviceList: { Id: 1 }
|
});
|
||||||
}
|
|
||||||
));
|
|
||||||
this.parseDeviceList(response.DeviceList!);
|
this.parseDeviceList(response.DeviceList!);
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -200,9 +193,7 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
protected async sendMessage(
|
protected async sendMessage(msg: Messages.ButtplugMessage): Promise<Messages.ButtplugMessage> {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<Messages.ButtplugMessage> {
|
|
||||||
this.checkConnector();
|
this.checkConnector();
|
||||||
const p = this._sorter.PrepareOutgoingMessage(msg);
|
const p = this._sorter.PrepareOutgoingMessage(msg);
|
||||||
await this._connector!.send(msg);
|
await this._connector!.send(msg);
|
||||||
@@ -211,15 +202,11 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
|
|
||||||
protected checkConnector() {
|
protected checkConnector() {
|
||||||
if (!this.connected) {
|
if (!this.connected) {
|
||||||
throw new ButtplugClientConnectorException(
|
throw new ButtplugClientConnectorException("ButtplugClient not connected");
|
||||||
'ButtplugClient not connected'
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
protected sendMsgExpectOk = async (
|
protected sendMsgExpectOk = async (msg: Messages.ButtplugMessage): Promise<void> => {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<void> => {
|
|
||||||
const response = await this.sendMessage(msg);
|
const response = await this.sendMessage(msg);
|
||||||
if (response.Ok !== undefined) {
|
if (response.Ok !== undefined) {
|
||||||
return;
|
return;
|
||||||
@@ -229,13 +216,13 @@ export class ButtplugClient extends EventEmitter {
|
|||||||
throw ButtplugError.LogAndError(
|
throw ButtplugError.LogAndError(
|
||||||
ButtplugMessageError,
|
ButtplugMessageError,
|
||||||
this._logger,
|
this._logger,
|
||||||
`Message ${response} not handled by SendMsgExpectOk`
|
`Message ${response} not handled by SendMsgExpectOk`,
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
protected sendMessageClosure = async (
|
protected sendMessageClosure = async (
|
||||||
msg: Messages.ButtplugMessage
|
msg: Messages.ButtplugMessage,
|
||||||
): Promise<Messages.ButtplugMessage> => {
|
): Promise<Messages.ButtplugMessage> => {
|
||||||
return await this.sendMessage(msg);
|
return await this.sendMessage(msg);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { ButtplugError } from '../core/Exceptions';
|
import { ButtplugError } from "../core/Exceptions";
|
||||||
import * as Messages from '../core/Messages';
|
import * as Messages from "../core/Messages";
|
||||||
|
|
||||||
export class ButtplugClientConnectorException extends ButtplugError {
|
export class ButtplugClientConnectorException extends ButtplugError {
|
||||||
public constructor(message: string) {
|
public constructor(message: string) {
|
||||||
|
|||||||
@@ -6,22 +6,17 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
'use strict';
|
"use strict";
|
||||||
import * as Messages from '../core/Messages';
|
import * as Messages from "../core/Messages";
|
||||||
import {
|
import { ButtplugDeviceError, ButtplugError, ButtplugMessageError } from "../core/Exceptions";
|
||||||
ButtplugDeviceError,
|
import { EventEmitter } from "eventemitter3";
|
||||||
ButtplugError,
|
import { ButtplugClientDeviceFeature } from "./ButtplugClientDeviceFeature";
|
||||||
ButtplugMessageError,
|
import { DeviceOutputCommand } from "./ButtplugClientDeviceCommand";
|
||||||
} from '../core/Exceptions';
|
|
||||||
import { EventEmitter } from 'eventemitter3';
|
|
||||||
import { ButtplugClientDeviceFeature } from './ButtplugClientDeviceFeature';
|
|
||||||
import { DeviceOutputCommand } from './ButtplugClientDeviceCommand';
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Represents an abstract device, capable of taking certain kinds of messages.
|
* Represents an abstract device, capable of taking certain kinds of messages.
|
||||||
*/
|
*/
|
||||||
export class ButtplugClientDevice extends EventEmitter {
|
export class ButtplugClientDevice extends EventEmitter {
|
||||||
|
|
||||||
private _features: Map<number, ButtplugClientDeviceFeature>;
|
private _features: Map<number, ButtplugClientDeviceFeature>;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -58,9 +53,7 @@ export class ButtplugClientDevice extends EventEmitter {
|
|||||||
|
|
||||||
public static fromMsg(
|
public static fromMsg(
|
||||||
msg: Messages.DeviceInfo,
|
msg: Messages.DeviceInfo,
|
||||||
sendClosure: (
|
sendClosure: (msg: Messages.ButtplugMessage) => Promise<Messages.ButtplugMessage>,
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
) => Promise<Messages.ButtplugMessage>
|
|
||||||
): ButtplugClientDevice {
|
): ButtplugClientDevice {
|
||||||
return new ButtplugClientDevice(msg, sendClosure);
|
return new ButtplugClientDevice(msg, sendClosure);
|
||||||
}
|
}
|
||||||
@@ -72,25 +65,29 @@ export class ButtplugClientDevice extends EventEmitter {
|
|||||||
*/
|
*/
|
||||||
private constructor(
|
private constructor(
|
||||||
private _deviceInfo: Messages.DeviceInfo,
|
private _deviceInfo: Messages.DeviceInfo,
|
||||||
private _sendClosure: (
|
private _sendClosure: (msg: Messages.ButtplugMessage) => Promise<Messages.ButtplugMessage>,
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
) => Promise<Messages.ButtplugMessage>
|
|
||||||
) {
|
) {
|
||||||
super();
|
super();
|
||||||
this._features = new Map(Object.entries(_deviceInfo.DeviceFeatures).map(([index, v]) => [parseInt(index), new ButtplugClientDeviceFeature(_deviceInfo.DeviceIndex, _deviceInfo.DeviceName, v, _sendClosure)]));
|
this._features = new Map(
|
||||||
|
Object.entries(_deviceInfo.DeviceFeatures).map(([index, v]) => [
|
||||||
|
parseInt(index),
|
||||||
|
new ButtplugClientDeviceFeature(
|
||||||
|
_deviceInfo.DeviceIndex,
|
||||||
|
_deviceInfo.DeviceName,
|
||||||
|
v,
|
||||||
|
_sendClosure,
|
||||||
|
),
|
||||||
|
]),
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async send(
|
public async send(msg: Messages.ButtplugMessage): Promise<Messages.ButtplugMessage> {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<Messages.ButtplugMessage> {
|
|
||||||
// Assume we're getting the closure from ButtplugClient, which does all of
|
// Assume we're getting the closure from ButtplugClient, which does all of
|
||||||
// the index/existence/connection/message checks for us.
|
// the index/existence/connection/message checks for us.
|
||||||
return await this._sendClosure(msg);
|
return await this._sendClosure(msg);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected sendMsgExpectOk = async (
|
protected sendMsgExpectOk = async (msg: Messages.ButtplugMessage): Promise<void> => {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<void> => {
|
|
||||||
const response = await this.send(msg);
|
const response = await this.send(msg);
|
||||||
if (response.Ok !== undefined) {
|
if (response.Ok !== undefined) {
|
||||||
return;
|
return;
|
||||||
@@ -109,19 +106,36 @@ export class ButtplugClientDevice extends EventEmitter {
|
|||||||
|
|
||||||
protected isOutputValid(featureIndex: number, type: Messages.OutputType) {
|
protected isOutputValid(featureIndex: number, type: Messages.OutputType) {
|
||||||
if (!this._deviceInfo.DeviceFeatures.hasOwnProperty(featureIndex.toString())) {
|
if (!this._deviceInfo.DeviceFeatures.hasOwnProperty(featureIndex.toString())) {
|
||||||
throw new ButtplugDeviceError(`Feature index ${featureIndex} does not exist for device ${this.name}`);
|
throw new ButtplugDeviceError(
|
||||||
|
`Feature index ${featureIndex} does not exist for device ${this.name}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
if (this._deviceInfo.DeviceFeatures[featureIndex.toString()].Outputs !== undefined && !this._deviceInfo.DeviceFeatures[featureIndex.toString()].Outputs.hasOwnProperty(type)) {
|
if (
|
||||||
throw new ButtplugDeviceError(`Feature index ${featureIndex} does not support type ${type} for device ${this.name}`);
|
this._deviceInfo.DeviceFeatures[featureIndex.toString()].Outputs !== undefined &&
|
||||||
|
!this._deviceInfo.DeviceFeatures[featureIndex.toString()].Outputs.hasOwnProperty(type)
|
||||||
|
) {
|
||||||
|
throw new ButtplugDeviceError(
|
||||||
|
`Feature index ${featureIndex} does not support type ${type} for device ${this.name}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public hasOutput(type: Messages.OutputType): boolean {
|
public hasOutput(type: Messages.OutputType): boolean {
|
||||||
return this._features.values().filter((f) => f.hasOutput(type)).toArray().length > 0;
|
return (
|
||||||
|
this._features
|
||||||
|
.values()
|
||||||
|
.filter((f) => f.hasOutput(type))
|
||||||
|
.toArray().length > 0
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public hasInput(type: Messages.InputType): boolean {
|
public hasInput(type: Messages.InputType): boolean {
|
||||||
return this._features.values().filter((f) => f.hasInput(type)).toArray().length > 0;
|
return (
|
||||||
|
this._features
|
||||||
|
.values()
|
||||||
|
.filter((f) => f.hasInput(type))
|
||||||
|
.toArray().length > 0
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async runOutput(cmd: DeviceOutputCommand): Promise<void> {
|
public async runOutput(cmd: DeviceOutputCommand): Promise<void> {
|
||||||
@@ -138,7 +152,15 @@ export class ButtplugClientDevice extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async stop(): Promise<void> {
|
public async stop(): Promise<void> {
|
||||||
await this.sendMsgExpectOk({StopCmd: { Id: 1, DeviceIndex: this.index, FeatureIndex: undefined, Inputs: true, Outputs: true}});
|
await this.sendMsgExpectOk({
|
||||||
|
StopCmd: {
|
||||||
|
Id: 1,
|
||||||
|
DeviceIndex: this.index,
|
||||||
|
FeatureIndex: undefined,
|
||||||
|
Inputs: true,
|
||||||
|
Outputs: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
public async battery(): Promise<number> {
|
public async battery(): Promise<number> {
|
||||||
@@ -160,6 +182,6 @@ export class ButtplugClientDevice extends EventEmitter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public emitDisconnected() {
|
public emitDisconnected() {
|
||||||
this.emit('deviceremoved');
|
this.emit("deviceremoved");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ class PercentOrSteps {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public static createSteps(s: number): PercentOrSteps {
|
public static createSteps(s: number): PercentOrSteps {
|
||||||
let v = new PercentOrSteps;
|
let v = new PercentOrSteps();
|
||||||
v._steps = s;
|
v._steps = s;
|
||||||
return v;
|
return v;
|
||||||
}
|
}
|
||||||
@@ -24,7 +24,7 @@ class PercentOrSteps {
|
|||||||
throw new ButtplugDeviceError(`Percent value ${p} is not in the range 0.0 <= x <= 1.0`);
|
throw new ButtplugDeviceError(`Percent value ${p} is not in the range 0.0 <= x <= 1.0`);
|
||||||
}
|
}
|
||||||
|
|
||||||
let v = new PercentOrSteps;
|
let v = new PercentOrSteps();
|
||||||
v._percent = p;
|
v._percent = p;
|
||||||
return v;
|
return v;
|
||||||
}
|
}
|
||||||
@@ -35,8 +35,7 @@ export class DeviceOutputCommand {
|
|||||||
private _outputType: OutputType,
|
private _outputType: OutputType,
|
||||||
private _value: PercentOrSteps,
|
private _value: PercentOrSteps,
|
||||||
private _duration?: number,
|
private _duration?: number,
|
||||||
)
|
) {}
|
||||||
{}
|
|
||||||
|
|
||||||
public get outputType() {
|
public get outputType() {
|
||||||
return this._outputType;
|
return this._outputType;
|
||||||
@@ -52,26 +51,36 @@ export class DeviceOutputCommand {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export class DeviceOutputValueConstructor {
|
export class DeviceOutputValueConstructor {
|
||||||
public constructor(
|
public constructor(private _outputType: OutputType) {}
|
||||||
private _outputType: OutputType)
|
|
||||||
{}
|
|
||||||
|
|
||||||
public steps(steps: number): DeviceOutputCommand {
|
public steps(steps: number): DeviceOutputCommand {
|
||||||
return new DeviceOutputCommand(this._outputType, PercentOrSteps.createSteps(steps), undefined);
|
return new DeviceOutputCommand(this._outputType, PercentOrSteps.createSteps(steps), undefined);
|
||||||
}
|
}
|
||||||
|
|
||||||
public percent(percent: number): DeviceOutputCommand {
|
public percent(percent: number): DeviceOutputCommand {
|
||||||
return new DeviceOutputCommand(this._outputType, PercentOrSteps.createPercent(percent), undefined);
|
return new DeviceOutputCommand(
|
||||||
|
this._outputType,
|
||||||
|
PercentOrSteps.createPercent(percent),
|
||||||
|
undefined,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export class DeviceOutputPositionWithDurationConstructor {
|
export class DeviceOutputPositionWithDurationConstructor {
|
||||||
public steps(steps: number, duration: number): DeviceOutputCommand {
|
public steps(steps: number, duration: number): DeviceOutputCommand {
|
||||||
return new DeviceOutputCommand(OutputType.Position, PercentOrSteps.createSteps(steps), duration);
|
return new DeviceOutputCommand(
|
||||||
|
OutputType.Position,
|
||||||
|
PercentOrSteps.createSteps(steps),
|
||||||
|
duration,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
public percent(percent: number, duration: number): DeviceOutputCommand {
|
public percent(percent: number, duration: number): DeviceOutputCommand {
|
||||||
return new DeviceOutputCommand(OutputType.HwPositionWithDuration, PercentOrSteps.createPercent(percent), duration);
|
return new DeviceOutputCommand(
|
||||||
|
OutputType.HwPositionWithDuration,
|
||||||
|
PercentOrSteps.createPercent(percent),
|
||||||
|
duration,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -3,23 +3,18 @@ import * as Messages from "../core/Messages";
|
|||||||
import { DeviceOutputCommand } from "./ButtplugClientDeviceCommand";
|
import { DeviceOutputCommand } from "./ButtplugClientDeviceCommand";
|
||||||
|
|
||||||
export class ButtplugClientDeviceFeature {
|
export class ButtplugClientDeviceFeature {
|
||||||
|
|
||||||
constructor(
|
constructor(
|
||||||
private _deviceIndex: number,
|
private _deviceIndex: number,
|
||||||
private _deviceName: string,
|
private _deviceName: string,
|
||||||
private _feature: Messages.DeviceFeature,
|
private _feature: Messages.DeviceFeature,
|
||||||
private _sendClosure: (
|
private _sendClosure: (msg: Messages.ButtplugMessage) => Promise<Messages.ButtplugMessage>,
|
||||||
msg: Messages.ButtplugMessage
|
) {}
|
||||||
) => Promise<Messages.ButtplugMessage>) {
|
|
||||||
}
|
|
||||||
|
|
||||||
protected send = async (msg: Messages.ButtplugMessage): Promise<Messages.ButtplugMessage> => {
|
protected send = async (msg: Messages.ButtplugMessage): Promise<Messages.ButtplugMessage> => {
|
||||||
return await this._sendClosure(msg);
|
return await this._sendClosure(msg);
|
||||||
}
|
};
|
||||||
|
|
||||||
protected sendMsgExpectOk = async (
|
protected sendMsgExpectOk = async (msg: Messages.ButtplugMessage): Promise<void> => {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<void> => {
|
|
||||||
const response = await this.send(msg);
|
const response = await this.send(msg);
|
||||||
if (response.Ok !== undefined) {
|
if (response.Ok !== undefined) {
|
||||||
return;
|
return;
|
||||||
@@ -32,13 +27,17 @@ export class ButtplugClientDeviceFeature {
|
|||||||
|
|
||||||
protected isOutputValid(type: Messages.OutputType) {
|
protected isOutputValid(type: Messages.OutputType) {
|
||||||
if (this._feature.Output !== undefined && !this._feature.Output.hasOwnProperty(type)) {
|
if (this._feature.Output !== undefined && !this._feature.Output.hasOwnProperty(type)) {
|
||||||
throw new ButtplugDeviceError(`Feature index ${this._feature.FeatureIndex} does not support type ${type} for device ${this._deviceName}`);
|
throw new ButtplugDeviceError(
|
||||||
|
`Feature index ${this._feature.FeatureIndex} does not support type ${type} for device ${this._deviceName}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
protected isInputValid(type: Messages.InputType) {
|
protected isInputValid(type: Messages.InputType) {
|
||||||
if (this._feature.Input !== undefined && !this._feature.Input.hasOwnProperty(type)) {
|
if (this._feature.Input !== undefined && !this._feature.Input.hasOwnProperty(type)) {
|
||||||
throw new ButtplugDeviceError(`Feature index ${this._feature.FeatureIndex} does not support type ${type} for device ${this._deviceName}`);
|
throw new ButtplugDeviceError(
|
||||||
|
`Feature index ${this._feature.FeatureIndex} does not support type ${type} for device ${this._deviceName}`,
|
||||||
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -74,8 +73,8 @@ export class ButtplugClientDeviceFeature {
|
|||||||
Id: 1,
|
Id: 1,
|
||||||
DeviceIndex: this._deviceIndex,
|
DeviceIndex: this._deviceIndex,
|
||||||
FeatureIndex: this._feature.FeatureIndex,
|
FeatureIndex: this._feature.FeatureIndex,
|
||||||
Command: outCommand
|
Command: outCommand,
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
await this.sendMsgExpectOk(cmd);
|
await this.sendMsgExpectOk(cmd);
|
||||||
}
|
}
|
||||||
@@ -124,20 +123,29 @@ export class ButtplugClientDeviceFeature {
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
public async runOutput(cmd: DeviceOutputCommand): Promise<void> {
|
public async runOutput(cmd: DeviceOutputCommand): Promise<void> {
|
||||||
if (this._feature.Output !== undefined && this._feature.Output.hasOwnProperty(cmd.outputType.toString())) {
|
if (
|
||||||
|
this._feature.Output !== undefined &&
|
||||||
|
this._feature.Output.hasOwnProperty(cmd.outputType.toString())
|
||||||
|
) {
|
||||||
return this.sendOutputCmd(cmd);
|
return this.sendOutputCmd(cmd);
|
||||||
}
|
}
|
||||||
throw new ButtplugDeviceError(`Output type ${cmd.outputType} not supported by feature.`);
|
throw new ButtplugDeviceError(`Output type ${cmd.outputType} not supported by feature.`);
|
||||||
}
|
}
|
||||||
|
|
||||||
public async runInput(inputType: Messages.InputType, inputCommand: Messages.InputCommandType): Promise<Messages.InputReading | undefined> {
|
public async runInput(
|
||||||
|
inputType: Messages.InputType,
|
||||||
|
inputCommand: Messages.InputCommandType,
|
||||||
|
): Promise<Messages.InputReading | undefined> {
|
||||||
// Make sure the requested feature is valid
|
// Make sure the requested feature is valid
|
||||||
this.isInputValid(inputType);
|
this.isInputValid(inputType);
|
||||||
let inputAttributes = this._feature.Input[inputType];
|
let inputAttributes = this._feature.Input[inputType];
|
||||||
console.log(this._feature.Input);
|
console.log(this._feature.Input);
|
||||||
if ((inputCommand === Messages.InputCommandType.Unsubscribe && !inputAttributes.Command.includes(Messages.InputCommandType.Subscribe)) && !inputAttributes.Command.includes(inputCommand)) {
|
if (
|
||||||
|
inputCommand === Messages.InputCommandType.Unsubscribe &&
|
||||||
|
!inputAttributes.Command.includes(Messages.InputCommandType.Subscribe) &&
|
||||||
|
!inputAttributes.Command.includes(inputCommand)
|
||||||
|
) {
|
||||||
throw new ButtplugDeviceError(`${inputType} does not support command ${inputCommand}`);
|
throw new ButtplugDeviceError(`${inputType} does not support command ${inputCommand}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -148,7 +156,7 @@ export class ButtplugClientDeviceFeature {
|
|||||||
FeatureIndex: this._feature.FeatureIndex,
|
FeatureIndex: this._feature.FeatureIndex,
|
||||||
Type: inputType,
|
Type: inputType,
|
||||||
Command: inputCommand,
|
Command: inputCommand,
|
||||||
}
|
},
|
||||||
};
|
};
|
||||||
if (inputCommand == Messages.InputCommandType.Read) {
|
if (inputCommand == Messages.InputCommandType.Read) {
|
||||||
const response = await this.send(cmd);
|
const response = await this.send(cmd);
|
||||||
|
|||||||
@@ -6,12 +6,11 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
'use strict';
|
"use strict";
|
||||||
|
|
||||||
import { ButtplugBrowserWebsocketClientConnector } from './ButtplugBrowserWebsocketClientConnector';
|
import { ButtplugBrowserWebsocketClientConnector } from "./ButtplugBrowserWebsocketClientConnector";
|
||||||
import { WebSocket as NodeWebSocket } from 'ws';
|
import { WebSocket as NodeWebSocket } from "ws";
|
||||||
|
|
||||||
export class ButtplugNodeWebsocketClientConnector extends ButtplugBrowserWebsocketClientConnector {
|
export class ButtplugNodeWebsocketClientConnector extends ButtplugBrowserWebsocketClientConnector {
|
||||||
protected _websocketConstructor =
|
protected _websocketConstructor = NodeWebSocket as unknown as typeof WebSocket;
|
||||||
NodeWebSocket as unknown as typeof WebSocket;
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { ButtplugMessage } from '../core/Messages';
|
import { ButtplugMessage } from "../core/Messages";
|
||||||
import { EventEmitter } from 'eventemitter3';
|
import { EventEmitter } from "eventemitter3";
|
||||||
|
|
||||||
export interface IButtplugClientConnector extends EventEmitter {
|
export interface IButtplugClientConnector extends EventEmitter {
|
||||||
connect: () => Promise<void>;
|
connect: () => Promise<void>;
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import * as Messages from './Messages';
|
import * as Messages from "./Messages";
|
||||||
import { ButtplugLogger } from './Logging';
|
import { ButtplugLogger } from "./Logging";
|
||||||
|
|
||||||
export class ButtplugError extends Error {
|
export class ButtplugError extends Error {
|
||||||
public get ErrorClass(): Messages.ErrorClass {
|
public get ErrorClass(): Messages.ErrorClass {
|
||||||
@@ -27,16 +27,16 @@ export class ButtplugError extends Error {
|
|||||||
Error: {
|
Error: {
|
||||||
Id: this.Id,
|
Id: this.Id,
|
||||||
ErrorCode: this.ErrorClass,
|
ErrorCode: this.ErrorClass,
|
||||||
ErrorMessage: this.message
|
ErrorMessage: this.message,
|
||||||
}
|
},
|
||||||
}
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
public static LogAndError<T extends ButtplugError>(
|
public static LogAndError<T extends ButtplugError>(
|
||||||
constructor: new (str: string, num: number) => T,
|
constructor: new (str: string, num: number) => T,
|
||||||
logger: ButtplugLogger,
|
logger: ButtplugLogger,
|
||||||
message: string,
|
message: string,
|
||||||
id: number = Messages.SYSTEM_MESSAGE_ID
|
id: number = Messages.SYSTEM_MESSAGE_ID,
|
||||||
): T {
|
): T {
|
||||||
logger.Error(message);
|
logger.Error(message);
|
||||||
return new constructor(message, id);
|
return new constructor(message, id);
|
||||||
@@ -67,7 +67,7 @@ export class ButtplugError extends Error {
|
|||||||
message: string,
|
message: string,
|
||||||
errorClass: Messages.ErrorClass,
|
errorClass: Messages.ErrorClass,
|
||||||
id: number = Messages.SYSTEM_MESSAGE_ID,
|
id: number = Messages.SYSTEM_MESSAGE_ID,
|
||||||
inner?: Error
|
inner?: Error,
|
||||||
) {
|
) {
|
||||||
super(message);
|
super(message);
|
||||||
this.errorClass = errorClass;
|
this.errorClass = errorClass;
|
||||||
|
|||||||
@@ -6,7 +6,7 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { EventEmitter } from 'eventemitter3';
|
import { EventEmitter } from "eventemitter3";
|
||||||
|
|
||||||
export enum ButtplugLogLevel {
|
export enum ButtplugLogLevel {
|
||||||
Off,
|
Off,
|
||||||
@@ -69,9 +69,7 @@ export class LogMessage {
|
|||||||
* Returns a formatted string with timestamp, level, and message.
|
* Returns a formatted string with timestamp, level, and message.
|
||||||
*/
|
*/
|
||||||
public get FormattedMessage() {
|
public get FormattedMessage() {
|
||||||
return `${ButtplugLogLevel[this.logLevel]} : ${this.timestamp} : ${
|
return `${ButtplugLogLevel[this.logLevel]} : ${this.timestamp} : ${this.logMessage}`;
|
||||||
this.logMessage
|
|
||||||
}`;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -176,10 +174,7 @@ export class ButtplugLogger extends EventEmitter {
|
|||||||
*/
|
*/
|
||||||
protected AddLogMessage(msg: string, level: ButtplugLogLevel) {
|
protected AddLogMessage(msg: string, level: ButtplugLogLevel) {
|
||||||
// If nothing wants the log message we have, ignore it.
|
// If nothing wants the log message we have, ignore it.
|
||||||
if (
|
if (level > this.maximumEventLogLevel && level > this.maximumConsoleLogLevel) {
|
||||||
level > this.maximumEventLogLevel &&
|
|
||||||
level > this.maximumConsoleLogLevel
|
|
||||||
) {
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
const logMsg = new LogMessage(msg, level);
|
const logMsg = new LogMessage(msg, level);
|
||||||
@@ -191,7 +186,7 @@ export class ButtplugLogger extends EventEmitter {
|
|||||||
console.log(logMsg.FormattedMessage);
|
console.log(logMsg.FormattedMessage);
|
||||||
}
|
}
|
||||||
if (level <= this.maximumEventLogLevel) {
|
if (level <= this.maximumEventLogLevel) {
|
||||||
this.emit('log', logMsg);
|
this.emit("log", logMsg);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,9 +7,9 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
// tslint:disable:max-classes-per-file
|
// tslint:disable:max-classes-per-file
|
||||||
'use strict';
|
"use strict";
|
||||||
|
|
||||||
import { ButtplugMessageError } from './Exceptions';
|
import { ButtplugMessageError } from "./Exceptions";
|
||||||
|
|
||||||
export const SYSTEM_MESSAGE_ID = 0;
|
export const SYSTEM_MESSAGE_ID = 0;
|
||||||
export const DEFAULT_MESSAGE_ID = 1;
|
export const DEFAULT_MESSAGE_ID = 1;
|
||||||
@@ -132,34 +132,34 @@ export interface DeviceList {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export enum OutputType {
|
export enum OutputType {
|
||||||
Unknown = 'Unknown',
|
Unknown = "Unknown",
|
||||||
Vibrate = 'Vibrate',
|
Vibrate = "Vibrate",
|
||||||
Rotate = 'Rotate',
|
Rotate = "Rotate",
|
||||||
Oscillate = 'Oscillate',
|
Oscillate = "Oscillate",
|
||||||
Constrict = 'Constrict',
|
Constrict = "Constrict",
|
||||||
Inflate = 'Inflate',
|
Inflate = "Inflate",
|
||||||
Position = 'Position',
|
Position = "Position",
|
||||||
HwPositionWithDuration = 'HwPositionWithDuration',
|
HwPositionWithDuration = "HwPositionWithDuration",
|
||||||
Temperature = 'Temperature',
|
Temperature = "Temperature",
|
||||||
Spray = 'Spray',
|
Spray = "Spray",
|
||||||
Led = 'Led',
|
Led = "Led",
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum InputType {
|
export enum InputType {
|
||||||
Unknown = 'Unknown',
|
Unknown = "Unknown",
|
||||||
Battery = 'Battery',
|
Battery = "Battery",
|
||||||
RSSI = 'RSSI',
|
RSSI = "RSSI",
|
||||||
Button = 'Button',
|
Button = "Button",
|
||||||
Pressure = 'Pressure',
|
Pressure = "Pressure",
|
||||||
// Temperature,
|
// Temperature,
|
||||||
// Accelerometer,
|
// Accelerometer,
|
||||||
// Gyro,
|
// Gyro,
|
||||||
}
|
}
|
||||||
|
|
||||||
export enum InputCommandType {
|
export enum InputCommandType {
|
||||||
Read = 'Read',
|
Read = "Read",
|
||||||
Subscribe = 'Subscribe',
|
Subscribe = "Subscribe",
|
||||||
Unsubscribe = 'Unsubscribe',
|
Unsubscribe = "Unsubscribe",
|
||||||
}
|
}
|
||||||
|
|
||||||
export interface DeviceFeatureInput {
|
export interface DeviceFeatureInput {
|
||||||
|
|||||||
4
packages/buttplug/src/core/index.d.ts
vendored
4
packages/buttplug/src/core/index.d.ts
vendored
@@ -1,4 +1,4 @@
|
|||||||
declare module "*.json" {
|
declare module "*.json" {
|
||||||
const content: string;
|
const content: string;
|
||||||
export default content;
|
export default content;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,27 +6,24 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import { ButtplugMessage } from './core/Messages';
|
import { ButtplugMessage } from "./core/Messages";
|
||||||
import { IButtplugClientConnector } from './client/IButtplugClientConnector';
|
import { IButtplugClientConnector } from "./client/IButtplugClientConnector";
|
||||||
import { EventEmitter } from 'eventemitter3';
|
import { EventEmitter } from "eventemitter3";
|
||||||
|
|
||||||
export * from './client/ButtplugClient';
|
export * from "./client/ButtplugClient";
|
||||||
export * from './client/ButtplugClientDevice';
|
export * from "./client/ButtplugClientDevice";
|
||||||
export * from './client/ButtplugBrowserWebsocketClientConnector';
|
export * from "./client/ButtplugBrowserWebsocketClientConnector";
|
||||||
export * from './client/ButtplugNodeWebsocketClientConnector';
|
export * from "./client/ButtplugNodeWebsocketClientConnector";
|
||||||
export * from './client/ButtplugClientConnectorException';
|
export * from "./client/ButtplugClientConnectorException";
|
||||||
export * from './utils/ButtplugMessageSorter';
|
export * from "./utils/ButtplugMessageSorter";
|
||||||
export * from './client/ButtplugClientDeviceCommand';
|
export * from "./client/ButtplugClientDeviceCommand";
|
||||||
export * from './client/ButtplugClientDeviceFeature';
|
export * from "./client/ButtplugClientDeviceFeature";
|
||||||
export * from './client/IButtplugClientConnector';
|
export * from "./client/IButtplugClientConnector";
|
||||||
export * from './core/Messages';
|
export * from "./core/Messages";
|
||||||
export * from './core/Logging';
|
export * from "./core/Logging";
|
||||||
export * from './core/Exceptions';
|
export * from "./core/Exceptions";
|
||||||
|
|
||||||
export class ButtplugWasmClientConnector
|
export class ButtplugWasmClientConnector extends EventEmitter implements IButtplugClientConnector {
|
||||||
extends EventEmitter
|
|
||||||
implements IButtplugClientConnector
|
|
||||||
{
|
|
||||||
private static _loggingActivated = false;
|
private static _loggingActivated = false;
|
||||||
private static wasmInstance;
|
private static wasmInstance;
|
||||||
private _connected: boolean = false;
|
private _connected: boolean = false;
|
||||||
@@ -43,35 +40,30 @@ export class ButtplugWasmClientConnector
|
|||||||
|
|
||||||
private static maybeLoadWasm = async () => {
|
private static maybeLoadWasm = async () => {
|
||||||
if (ButtplugWasmClientConnector.wasmInstance == undefined) {
|
if (ButtplugWasmClientConnector.wasmInstance == undefined) {
|
||||||
ButtplugWasmClientConnector.wasmInstance = await import(
|
ButtplugWasmClientConnector.wasmInstance = await import("../wasm/index.js");
|
||||||
'../wasm/index.js'
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
public static activateLogging = async (logLevel: string = 'debug') => {
|
public static activateLogging = async (logLevel: string = "debug") => {
|
||||||
await ButtplugWasmClientConnector.maybeLoadWasm();
|
await ButtplugWasmClientConnector.maybeLoadWasm();
|
||||||
if (this._loggingActivated) {
|
if (this._loggingActivated) {
|
||||||
console.log('Logging already activated, ignoring.');
|
console.log("Logging already activated, ignoring.");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
console.log('Turning on logging.');
|
console.log("Turning on logging.");
|
||||||
ButtplugWasmClientConnector.wasmInstance.buttplug_activate_env_logger(
|
ButtplugWasmClientConnector.wasmInstance.buttplug_activate_env_logger(logLevel);
|
||||||
logLevel,
|
|
||||||
);
|
|
||||||
};
|
};
|
||||||
|
|
||||||
public initialize = async (): Promise<void> => {};
|
public initialize = async (): Promise<void> => {};
|
||||||
|
|
||||||
public connect = async (): Promise<void> => {
|
public connect = async (): Promise<void> => {
|
||||||
await ButtplugWasmClientConnector.maybeLoadWasm();
|
await ButtplugWasmClientConnector.maybeLoadWasm();
|
||||||
this.client =
|
this.client = ButtplugWasmClientConnector.wasmInstance.buttplug_create_embedded_wasm_server(
|
||||||
ButtplugWasmClientConnector.wasmInstance.buttplug_create_embedded_wasm_server(
|
(msgs) => {
|
||||||
(msgs) => {
|
this.emitMessage(msgs);
|
||||||
this.emitMessage(msgs);
|
},
|
||||||
},
|
this.serverPtr,
|
||||||
this.serverPtr,
|
);
|
||||||
);
|
|
||||||
this._connected = true;
|
this._connected = true;
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -80,7 +72,7 @@ export class ButtplugWasmClientConnector
|
|||||||
public send = (msg: ButtplugMessage): void => {
|
public send = (msg: ButtplugMessage): void => {
|
||||||
ButtplugWasmClientConnector.wasmInstance.buttplug_client_send_json_message(
|
ButtplugWasmClientConnector.wasmInstance.buttplug_client_send_json_message(
|
||||||
this.client,
|
this.client,
|
||||||
new TextEncoder().encode('[' + JSON.stringify(msg) + ']'),
|
new TextEncoder().encode("[" + JSON.stringify(msg) + "]"),
|
||||||
(output) => {
|
(output) => {
|
||||||
this.emitMessage(output);
|
this.emitMessage(output);
|
||||||
},
|
},
|
||||||
@@ -90,6 +82,6 @@ export class ButtplugWasmClientConnector
|
|||||||
private emitMessage = (msg: Uint8Array) => {
|
private emitMessage = (msg: Uint8Array) => {
|
||||||
const str = new TextDecoder().decode(msg);
|
const str = new TextDecoder().decode(msg);
|
||||||
const msgs: ButtplugMessage[] = JSON.parse(str);
|
const msgs: ButtplugMessage[] = JSON.parse(str);
|
||||||
this.emit('message', msgs);
|
this.emit("message", msgs);
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,10 +6,10 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
'use strict';
|
"use strict";
|
||||||
|
|
||||||
import { EventEmitter } from 'eventemitter3';
|
import { EventEmitter } from "eventemitter3";
|
||||||
import { ButtplugMessage } from '../core/Messages';
|
import { ButtplugMessage } from "../core/Messages";
|
||||||
|
|
||||||
export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
||||||
protected _ws: WebSocket | undefined;
|
protected _ws: WebSocket | undefined;
|
||||||
@@ -26,18 +26,20 @@ export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
|||||||
public connect = async (): Promise<void> => {
|
public connect = async (): Promise<void> => {
|
||||||
return new Promise<void>((resolve, reject) => {
|
return new Promise<void>((resolve, reject) => {
|
||||||
const ws = new (this._websocketConstructor ?? WebSocket)(this._url);
|
const ws = new (this._websocketConstructor ?? WebSocket)(this._url);
|
||||||
const onErrorCallback = (event: Event) => {reject(event)}
|
const onErrorCallback = (event: Event) => {
|
||||||
const onCloseCallback = (event: CloseEvent) => reject(event.reason)
|
reject(event);
|
||||||
ws.addEventListener('open', async () => {
|
};
|
||||||
|
const onCloseCallback = (event: CloseEvent) => reject(event.reason);
|
||||||
|
ws.addEventListener("open", async () => {
|
||||||
this._ws = ws;
|
this._ws = ws;
|
||||||
try {
|
try {
|
||||||
await this.initialize();
|
await this.initialize();
|
||||||
this._ws.addEventListener('message', (msg) => {
|
this._ws.addEventListener("message", (msg) => {
|
||||||
this.parseIncomingMessage(msg);
|
this.parseIncomingMessage(msg);
|
||||||
});
|
});
|
||||||
this._ws.removeEventListener('close', onCloseCallback);
|
this._ws.removeEventListener("close", onCloseCallback);
|
||||||
this._ws.removeEventListener('error', onErrorCallback);
|
this._ws.removeEventListener("error", onErrorCallback);
|
||||||
this._ws.addEventListener('close', this.disconnect);
|
this._ws.addEventListener("close", this.disconnect);
|
||||||
resolve();
|
resolve();
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
reject(e);
|
reject(e);
|
||||||
@@ -47,8 +49,8 @@ export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
|||||||
// browsers usually only throw Error Code 1006. It's up to those using this
|
// browsers usually only throw Error Code 1006. It's up to those using this
|
||||||
// library to state what the problem might be.
|
// library to state what the problem might be.
|
||||||
|
|
||||||
ws.addEventListener('error', onErrorCallback)
|
ws.addEventListener("error", onErrorCallback);
|
||||||
ws.addEventListener('close', onCloseCallback);
|
ws.addEventListener("close", onCloseCallback);
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -58,14 +60,14 @@ export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
|||||||
}
|
}
|
||||||
this._ws!.close();
|
this._ws!.close();
|
||||||
this._ws = undefined;
|
this._ws = undefined;
|
||||||
this.emit('disconnect');
|
this.emit("disconnect");
|
||||||
};
|
};
|
||||||
|
|
||||||
public sendMessage(msg: ButtplugMessage) {
|
public sendMessage(msg: ButtplugMessage) {
|
||||||
if (!this.Connected) {
|
if (!this.Connected) {
|
||||||
throw new Error('ButtplugBrowserWebsocketConnector not connected');
|
throw new Error("ButtplugBrowserWebsocketConnector not connected");
|
||||||
}
|
}
|
||||||
this._ws!.send('[' + JSON.stringify(msg) + ']');
|
this._ws!.send("[" + JSON.stringify(msg) + "]");
|
||||||
}
|
}
|
||||||
|
|
||||||
public initialize = async (): Promise<void> => {
|
public initialize = async (): Promise<void> => {
|
||||||
@@ -73,9 +75,9 @@ export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
|||||||
};
|
};
|
||||||
|
|
||||||
protected parseIncomingMessage(event: MessageEvent) {
|
protected parseIncomingMessage(event: MessageEvent) {
|
||||||
if (typeof event.data === 'string') {
|
if (typeof event.data === "string") {
|
||||||
const msgs: ButtplugMessage[] = JSON.parse(event.data);
|
const msgs: ButtplugMessage[] = JSON.parse(event.data);
|
||||||
this.emit('message', msgs);
|
this.emit("message", msgs);
|
||||||
} else if (event.data instanceof Blob) {
|
} else if (event.data instanceof Blob) {
|
||||||
// No-op, we only use text message types.
|
// No-op, we only use text message types.
|
||||||
}
|
}
|
||||||
@@ -83,6 +85,6 @@ export class ButtplugBrowserWebsocketConnector extends EventEmitter {
|
|||||||
|
|
||||||
protected onReaderLoad(event: Event) {
|
protected onReaderLoad(event: Event) {
|
||||||
const msgs: ButtplugMessage[] = JSON.parse((event.target as FileReader).result as string);
|
const msgs: ButtplugMessage[] = JSON.parse((event.target as FileReader).result as string);
|
||||||
this.emit('message', msgs);
|
this.emit("message", msgs);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -6,8 +6,8 @@
|
|||||||
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
* @copyright Copyright (c) Nonpolynomial Labs LLC. All rights reserved.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
import * as Messages from '../core/Messages';
|
import * as Messages from "../core/Messages";
|
||||||
import { ButtplugError } from '../core/Exceptions';
|
import { ButtplugError } from "../core/Exceptions";
|
||||||
|
|
||||||
export class ButtplugMessageSorter {
|
export class ButtplugMessageSorter {
|
||||||
protected _counter = 1;
|
protected _counter = 1;
|
||||||
@@ -21,9 +21,7 @@ export class ButtplugMessageSorter {
|
|||||||
// One of the places we should actually return a promise, as we need to store
|
// One of the places we should actually return a promise, as we need to store
|
||||||
// them while waiting for them to return across the line.
|
// them while waiting for them to return across the line.
|
||||||
// tslint:disable:promise-function-async
|
// tslint:disable:promise-function-async
|
||||||
public PrepareOutgoingMessage(
|
public PrepareOutgoingMessage(msg: Messages.ButtplugMessage): Promise<Messages.ButtplugMessage> {
|
||||||
msg: Messages.ButtplugMessage
|
|
||||||
): Promise<Messages.ButtplugMessage> {
|
|
||||||
if (this._useCounter) {
|
if (this._useCounter) {
|
||||||
Messages.setMsgId(msg, this._counter);
|
Messages.setMsgId(msg, this._counter);
|
||||||
// Always increment last, otherwise we might lose sync
|
// Always increment last, otherwise we might lose sync
|
||||||
@@ -31,19 +29,15 @@ export class ButtplugMessageSorter {
|
|||||||
}
|
}
|
||||||
let res;
|
let res;
|
||||||
let rej;
|
let rej;
|
||||||
const msgPromise = new Promise<Messages.ButtplugMessage>(
|
const msgPromise = new Promise<Messages.ButtplugMessage>((resolve, reject) => {
|
||||||
(resolve, reject) => {
|
res = resolve;
|
||||||
res = resolve;
|
rej = reject;
|
||||||
rej = reject;
|
});
|
||||||
}
|
|
||||||
);
|
|
||||||
this._waitingMsgs.set(Messages.msgId(msg), [res, rej]);
|
this._waitingMsgs.set(Messages.msgId(msg), [res, rej]);
|
||||||
return msgPromise;
|
return msgPromise;
|
||||||
}
|
}
|
||||||
|
|
||||||
public ParseIncomingMessages(
|
public ParseIncomingMessages(msgs: Messages.ButtplugMessage[]): Messages.ButtplugMessage[] {
|
||||||
msgs: Messages.ButtplugMessage[]
|
|
||||||
): Messages.ButtplugMessage[] {
|
|
||||||
const noMatch: Messages.ButtplugMessage[] = [];
|
const noMatch: Messages.ButtplugMessage[] = [];
|
||||||
for (const x of msgs) {
|
for (const x of msgs) {
|
||||||
let id = Messages.msgId(x);
|
let id = Messages.msgId(x);
|
||||||
|
|||||||
@@ -1,3 +1,3 @@
|
|||||||
export function getRandomInt(max: number) {
|
export function getRandomInt(max: number) {
|
||||||
return Math.floor(Math.random() * Math.floor(max));
|
return Math.floor(Math.random() * Math.floor(max));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -7,7 +7,6 @@ use buttplug_server::device::hardware::communication::{
|
|||||||
HardwareCommunicationManagerEvent,
|
HardwareCommunicationManagerEvent,
|
||||||
};
|
};
|
||||||
use futures::future;
|
use futures::future;
|
||||||
use js_sys::Array;
|
|
||||||
use tokio::sync::mpsc::Sender;
|
use tokio::sync::mpsc::Sender;
|
||||||
use wasm_bindgen::prelude::*;
|
use wasm_bindgen::prelude::*;
|
||||||
use wasm_bindgen_futures::{spawn_local, JsFuture};
|
use wasm_bindgen_futures::{spawn_local, JsFuture};
|
||||||
@@ -63,8 +62,8 @@ impl HardwareCommunicationManager for WebBluetoothCommunicationManager {
|
|||||||
// way for anyone to add device configurations through FFI yet anyways.
|
// way for anyone to add device configurations through FFI yet anyways.
|
||||||
let config_manager = create_test_dcm(false);
|
let config_manager = create_test_dcm(false);
|
||||||
let options = web_sys::RequestDeviceOptions::new();
|
let options = web_sys::RequestDeviceOptions::new();
|
||||||
let filters = Array::new();
|
let mut filters = Vec::new();
|
||||||
let optional_services = Array::new();
|
let mut optional_services = Vec::new();
|
||||||
for vals in config_manager.base_communication_specifiers().iter() {
|
for vals in config_manager.base_communication_specifiers().iter() {
|
||||||
for config in vals.1.iter() {
|
for config in vals.1.iter() {
|
||||||
if let ProtocolCommunicationSpecifier::BluetoothLE(btle) = &config {
|
if let ProtocolCommunicationSpecifier::BluetoothLE(btle) = &config {
|
||||||
@@ -77,16 +76,16 @@ impl HardwareCommunicationManager for WebBluetoothCommunicationManager {
|
|||||||
} else {
|
} else {
|
||||||
filter.set_name(&name);
|
filter.set_name(&name);
|
||||||
}
|
}
|
||||||
filters.push(&filter.into());
|
filters.push(filter);
|
||||||
}
|
}
|
||||||
for (service, _) in btle.services() {
|
for (service, _) in btle.services() {
|
||||||
optional_services.push(&service.to_string().into());
|
optional_services.push(js_sys::JsString::from(service.to_string()));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
options.set_filters(&filters.into());
|
options.set_filters(&filters);
|
||||||
options.set_optional_services(&optional_services.into());
|
options.set_optional_services(&optional_services);
|
||||||
let nav = web_sys::window().unwrap().navigator();
|
let nav = web_sys::window().unwrap().navigator();
|
||||||
//nav.bluetooth().get_availability();
|
//nav.bluetooth().get_availability();
|
||||||
//JsFuture::from(nav.bluetooth().request_device()).await;
|
//JsFuture::from(nav.bluetooth().request_device()).await;
|
||||||
|
|||||||
@@ -1,11 +1,11 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"target": "esnext",
|
"target": "esnext",
|
||||||
"module": "esnext",
|
"module": "esnext",
|
||||||
"outDir": "dist",
|
"outDir": "dist",
|
||||||
"moduleResolution": "bundler",
|
"moduleResolution": "bundler",
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"skipLibCheck": true
|
"skipLibCheck": true
|
||||||
},
|
},
|
||||||
"include": ["src"]
|
"include": ["src"]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -3,19 +3,19 @@ import path from "path";
|
|||||||
import wasm from "vite-plugin-wasm";
|
import wasm from "vite-plugin-wasm";
|
||||||
|
|
||||||
export default defineConfig({
|
export default defineConfig({
|
||||||
plugins: [wasm()], // include wasm plugin
|
plugins: [wasm()], // include wasm plugin
|
||||||
build: {
|
build: {
|
||||||
lib: {
|
lib: {
|
||||||
entry: path.resolve(__dirname, "src/index.ts"),
|
entry: path.resolve(__dirname, "src/index.ts"),
|
||||||
name: "buttplug",
|
name: "buttplug",
|
||||||
fileName: "index",
|
fileName: "index",
|
||||||
formats: ["es"], // this is important
|
formats: ["es"], // this is important
|
||||||
},
|
},
|
||||||
minify: false, // for demo purposes
|
minify: false, // for demo purposes
|
||||||
target: "esnext", // this is important as well
|
target: "esnext", // this is important as well
|
||||||
outDir: "dist",
|
outDir: "dist",
|
||||||
rollupOptions: {
|
rollupOptions: {
|
||||||
external: [/\.\/wasm\//, /\.\.\/wasm\//],
|
external: [/\.\/wasm\//, /\.\.\/wasm\//],
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
PUBLIC_API_URL=https://sexy.pivoine.art/api
|
PUBLIC_API_URL=http://localhost:3000/api
|
||||||
PUBLIC_URL=https://sexy.pivoine.art
|
PUBLIC_URL=http://localhost:3000
|
||||||
PUBLIC_UMAMI_ID=
|
PUBLIC_UMAMI_ID=
|
||||||
LETTERSPACE_API_URL=
|
PUBLIC_UMAMI_SCRIPT=
|
||||||
LETTERSPACE_API_KEY=
|
|
||||||
LETTERSPACE_LIST_ID=
|
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://shadcn-svelte.com/schema.json",
|
"$schema": "https://shadcn-svelte.com/schema.json",
|
||||||
"tailwind": {
|
"tailwind": {
|
||||||
"css": "src/app.css",
|
"css": "src/app.css",
|
||||||
"baseColor": "slate"
|
"baseColor": "slate"
|
||||||
},
|
},
|
||||||
"aliases": {
|
"aliases": {
|
||||||
"components": "$lib/components",
|
"components": "$lib/components",
|
||||||
"utils": "$lib/utils",
|
"utils": "$lib/utils",
|
||||||
"ui": "$lib/components/ui",
|
"ui": "$lib/components/ui",
|
||||||
"hooks": "$lib/hooks",
|
"hooks": "$lib/hooks",
|
||||||
"lib": "$lib"
|
"lib": "$lib"
|
||||||
},
|
},
|
||||||
"typescript": true,
|
"typescript": true,
|
||||||
"registry": "https://shadcn-svelte.com/registry"
|
"registry": "https://shadcn-svelte.com/registry"
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,16 +1,16 @@
|
|||||||
{
|
{
|
||||||
"$schema": "https://unpkg.com/jsrepo@2.4.9/schemas/project-config.json",
|
"$schema": "https://unpkg.com/jsrepo@2.4.9/schemas/project-config.json",
|
||||||
"repos": ["@ieedan/shadcn-svelte-extras"],
|
"repos": ["@ieedan/shadcn-svelte-extras"],
|
||||||
"includeTests": false,
|
"includeTests": false,
|
||||||
"includeDocs": false,
|
"includeDocs": false,
|
||||||
"watermark": true,
|
"watermark": true,
|
||||||
"formatter": "prettier",
|
"formatter": "prettier",
|
||||||
"configFiles": {},
|
"configFiles": {},
|
||||||
"paths": {
|
"paths": {
|
||||||
"*": "$lib/blocks",
|
"*": "$lib/blocks",
|
||||||
"ui": "$lib/components/ui",
|
"ui": "$lib/components/ui",
|
||||||
"actions": "$lib/actions",
|
"actions": "$lib/actions",
|
||||||
"hooks": "$lib/hooks",
|
"hooks": "$lib/hooks",
|
||||||
"utils": "$lib/utils"
|
"utils": "$lib/utils"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,42 +8,47 @@
|
|||||||
"dev": "vite",
|
"dev": "vite",
|
||||||
"build": "vite build",
|
"build": "vite build",
|
||||||
"preview": "vite preview",
|
"preview": "vite preview",
|
||||||
"start": "node ./build"
|
"start": "node ./build",
|
||||||
|
"check": "svelte-check --tsconfig ./tsconfig.json --threshold warning"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@iconify-json/ri": "^1.2.5",
|
"@iconify-json/ri": "^1.2.10",
|
||||||
"@iconify/tailwind4": "^1.0.6",
|
"@iconify/tailwind4": "^1.2.1",
|
||||||
"@internationalized/date": "^3.8.2",
|
"@internationalized/date": "^3.11.0",
|
||||||
"@lucide/svelte": "^0.544.0",
|
"@lucide/svelte": "^0.561.0",
|
||||||
"@sveltejs/adapter-node": "^5.3.1",
|
"@sveltejs/adapter-node": "^5.5.4",
|
||||||
"@sveltejs/adapter-static": "^3.0.9",
|
"@sveltejs/adapter-static": "^3.0.10",
|
||||||
"@sveltejs/kit": "^2.37.0",
|
"@sveltejs/kit": "^2.53.4",
|
||||||
"@sveltejs/vite-plugin-svelte": "^6.1.4",
|
"@sveltejs/vite-plugin-svelte": "^6.2.4",
|
||||||
"@tailwindcss/forms": "^0.5.9",
|
"@tailwindcss/forms": "^0.5.11",
|
||||||
"@tailwindcss/typography": "^0.5.15",
|
"@tailwindcss/typography": "^0.5.19",
|
||||||
"@tailwindcss/vite": "^4.0.0",
|
"@tailwindcss/vite": "^4.2.1",
|
||||||
"@tsconfig/svelte": "^5.0.5",
|
"@tsconfig/svelte": "^5.0.8",
|
||||||
"bits-ui": "2.11.0",
|
"bits-ui": "2.16.2",
|
||||||
"clsx": "^2.1.1",
|
"clsx": "^2.1.1",
|
||||||
"glob": "^11.0.3",
|
"glob": "^13.0.6",
|
||||||
"mode-watcher": "^1.1.0",
|
"mode-watcher": "^1.1.0",
|
||||||
"prettier-plugin-svelte": "^3.4.0",
|
"prettier-plugin-svelte": "^3.5.1",
|
||||||
"super-sitemap": "^1.0.5",
|
"super-sitemap": "^1.0.7",
|
||||||
"svelte": "^5.38.6",
|
"svelte": "^5.53.7",
|
||||||
"svelte-sonner": "^1.0.5",
|
"svelte-check": "^4.4.4",
|
||||||
"tailwind-merge": "^3.3.1",
|
"svelte-sonner": "^1.0.8",
|
||||||
"tailwind-variants": "^1.0.0",
|
"tailwind-merge": "^3.5.0",
|
||||||
"tailwindcss": "^4.0.0",
|
"tailwind-variants": "^3.2.2",
|
||||||
"tw-animate-css": "^1.3.8",
|
"tailwindcss": "^4.2.1",
|
||||||
"typescript": "^5.9.2",
|
"tw-animate-css": "^1.4.0",
|
||||||
"vite": "^7.1.4",
|
"typescript": "^5.9.3",
|
||||||
|
"vite": "^7.3.1",
|
||||||
"vite-plugin-wasm": "3.5.0"
|
"vite-plugin-wasm": "3.5.0"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@directus/sdk": "^20.0.3",
|
|
||||||
"@sexy.pivoine.art/buttplug": "workspace:*",
|
"@sexy.pivoine.art/buttplug": "workspace:*",
|
||||||
"javascript-time-ago": "^2.5.11",
|
"@sexy.pivoine.art/types": "workspace:*",
|
||||||
"media-chrome": "^4.13.1",
|
"graphql": "^16.11.0",
|
||||||
|
"graphql-request": "^7.1.2",
|
||||||
|
"javascript-time-ago": "^2.6.4",
|
||||||
|
"marked": "^17.0.4",
|
||||||
|
"media-chrome": "^4.18.0",
|
||||||
"svelte-i18n": "^4.0.1"
|
"svelte-i18n": "^4.0.1"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,83 +5,85 @@
|
|||||||
|
|
||||||
@custom-variant dark (&:where(.dark, .dark *));
|
@custom-variant dark (&:where(.dark, .dark *));
|
||||||
|
|
||||||
|
@custom-variant hover (&:hover);
|
||||||
|
|
||||||
@theme {
|
@theme {
|
||||||
--animate-vibrate: vibrate 0.3s linear infinite;
|
--animate-vibrate: vibrate 0.3s linear infinite;
|
||||||
--animate-fade-in: fadeIn 0.3s ease-out;
|
--animate-fade-in: fadeIn 0.3s ease-out;
|
||||||
--animate-slide-up: slideUp 0.4s cubic-bezier(0.4, 0, 0.2, 1);
|
--animate-slide-up: slideUp 0.4s cubic-bezier(0.4, 0, 0.2, 1);
|
||||||
--animate-zoom-in: zoomIn 0.4s cubic-bezier(0.4, 0, 0.2, 1);
|
--animate-zoom-in: zoomIn 0.4s cubic-bezier(0.4, 0, 0.2, 1);
|
||||||
--animate-pulse-glow: pulseGlow 2s infinite;
|
--animate-pulse-glow: pulseGlow 2s infinite;
|
||||||
|
|
||||||
@keyframes vibrate {
|
@keyframes vibrate {
|
||||||
0% {
|
0% {
|
||||||
transform: translate(0);
|
transform: translate(0);
|
||||||
}
|
}
|
||||||
|
|
||||||
20% {
|
20% {
|
||||||
transform: translate(-2px, 2px);
|
transform: translate(-2px, 2px);
|
||||||
}
|
}
|
||||||
|
|
||||||
40% {
|
40% {
|
||||||
transform: translate(-2px, -2px);
|
transform: translate(-2px, -2px);
|
||||||
}
|
}
|
||||||
|
|
||||||
60% {
|
60% {
|
||||||
transform: translate(2px, 2px);
|
transform: translate(2px, 2px);
|
||||||
}
|
}
|
||||||
|
|
||||||
80% {
|
80% {
|
||||||
transform: translate(2px, -2px);
|
transform: translate(2px, -2px);
|
||||||
}
|
}
|
||||||
|
|
||||||
100% {
|
100% {
|
||||||
transform: translate(0);
|
transform: translate(0);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@keyframes fadeIn {
|
@keyframes fadeIn {
|
||||||
0% {
|
0% {
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
100% {
|
100% {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@keyframes slideUp {
|
@keyframes slideUp {
|
||||||
0% {
|
0% {
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
transform: translateY(30px) scale(0.95);
|
transform: translateY(30px) scale(0.95);
|
||||||
}
|
}
|
||||||
|
|
||||||
100% {
|
100% {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
transform: translateY(0) scale(1);
|
transform: translateY(0) scale(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@keyframes zoomIn {
|
@keyframes zoomIn {
|
||||||
0% {
|
0% {
|
||||||
opacity: 0;
|
opacity: 0;
|
||||||
transform: scale(0.9);
|
transform: scale(0.9);
|
||||||
}
|
}
|
||||||
|
|
||||||
100% {
|
100% {
|
||||||
opacity: 1;
|
opacity: 1;
|
||||||
transform: scale(1);
|
transform: scale(1);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@keyframes pulseGlow {
|
@keyframes pulseGlow {
|
||||||
0%,
|
0%,
|
||||||
100% {
|
100% {
|
||||||
boxShadow: 0 0 20px rgba(183, 0, 217, 0.3);
|
boxshadow: 0 0 20px rgba(183, 0, 217, 0.3);
|
||||||
}
|
}
|
||||||
|
|
||||||
50% {
|
50% {
|
||||||
boxShadow: 0 0 40px rgba(183, 0, 217, 0.6);
|
boxshadow: 0 0 40px rgba(183, 0, 217, 0.6);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
@@ -93,134 +95,159 @@
|
|||||||
color utility to any element that depends on these defaults.
|
color utility to any element that depends on these defaults.
|
||||||
*/
|
*/
|
||||||
@layer base {
|
@layer base {
|
||||||
* {
|
* {
|
||||||
@supports (color: color-mix(in lab, red, red)) {
|
@supports (color: color-mix(in lab, red, red)) {
|
||||||
outline-color: color-mix(in oklab, var(--ring) 50%, transparent);
|
outline-color: color-mix(in oklab, var(--ring) 50%, transparent);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
* {
|
* {
|
||||||
border-color: var(--border);
|
border-color: var(--border);
|
||||||
outline-color: var(--ring);
|
outline-color: var(--ring);
|
||||||
}
|
scrollbar-width: thin;
|
||||||
|
scrollbar-color: color-mix(in oklab, var(--primary) 40%, transparent) transparent;
|
||||||
|
}
|
||||||
|
|
||||||
.prose h2 {
|
*::-webkit-scrollbar {
|
||||||
@apply text-2xl font-bold mt-8 mb-4 text-foreground;
|
width: 6px;
|
||||||
}
|
height: 6px;
|
||||||
|
}
|
||||||
|
|
||||||
.prose h3 {
|
*::-webkit-scrollbar-track {
|
||||||
@apply text-xl font-semibold mt-6 mb-3 text-foreground;
|
background: transparent;
|
||||||
}
|
}
|
||||||
|
|
||||||
.prose p {
|
*::-webkit-scrollbar-thumb {
|
||||||
@apply mb-4 leading-relaxed;
|
background-color: color-mix(in oklab, var(--primary) 40%, transparent);
|
||||||
}
|
border-radius: 9999px;
|
||||||
|
}
|
||||||
|
|
||||||
.prose ul {
|
*::-webkit-scrollbar-thumb:hover {
|
||||||
@apply mb-4 pl-6;
|
background-color: color-mix(in oklab, var(--primary) 70%, transparent);
|
||||||
}
|
}
|
||||||
|
|
||||||
.prose li {
|
html {
|
||||||
@apply mb-2;
|
scrollbar-width: thin;
|
||||||
}
|
scrollbar-color: color-mix(in oklab, var(--primary) 40%, transparent) transparent;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prose h2 {
|
||||||
|
@apply text-2xl font-bold mt-8 mb-4 text-foreground;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prose h3 {
|
||||||
|
@apply text-xl font-semibold mt-6 mb-3 text-foreground;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prose p {
|
||||||
|
@apply mb-4 leading-relaxed;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prose ul {
|
||||||
|
@apply mb-4 pl-6;
|
||||||
|
}
|
||||||
|
|
||||||
|
.prose li {
|
||||||
|
@apply mb-2;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
:root {
|
:root {
|
||||||
--default-font-family: "Noto Sans", sans-serif;
|
--default-font-family: "Noto Sans", sans-serif;
|
||||||
--background: oklch(0.98 0.01 320);
|
--background: oklch(0.98 0.01 320);
|
||||||
--foreground: oklch(0.08 0.02 280);
|
--foreground: oklch(0.08 0.02 280);
|
||||||
--muted: oklch(0.95 0.01 280);
|
--muted: oklch(0.95 0.01 280);
|
||||||
--muted-foreground: oklch(0.4 0.02 280);
|
--muted-foreground: oklch(0.4 0.02 280);
|
||||||
--popover: oklch(1 0 0);
|
--popover: oklch(1 0 0);
|
||||||
--popover-foreground: oklch(0.145 0 0);
|
--popover-foreground: oklch(0.145 0 0);
|
||||||
--card: oklch(0.99 0.005 320);
|
--card: oklch(0.99 0.005 320);
|
||||||
--card-foreground: oklch(0.08 0.02 280);
|
--card-foreground: oklch(0.08 0.02 280);
|
||||||
--border: oklch(0.85 0.02 280);
|
--border: oklch(0.85 0.02 280);
|
||||||
--input: oklch(0.922 0 0);
|
--input: oklch(0.922 0 0);
|
||||||
--primary: oklch(56.971% 0.27455 319.257);
|
--primary: oklch(56.971% 0.27455 319.257);
|
||||||
--primary-foreground: oklch(0.98 0.01 320);
|
--primary-foreground: oklch(0.98 0.01 320);
|
||||||
--secondary: oklch(0.92 0.02 260);
|
--secondary: oklch(0.92 0.02 260);
|
||||||
--secondary-foreground: oklch(0.15 0.05 260);
|
--secondary-foreground: oklch(0.15 0.05 260);
|
||||||
--accent: oklch(0.45 0.35 280);
|
--accent: oklch(0.45 0.35 280);
|
||||||
--accent-foreground: oklch(0.98 0.01 280);
|
--accent-foreground: oklch(0.98 0.01 280);
|
||||||
--destructive: oklch(0.577 0.245 27.325);
|
--destructive: oklch(0.577 0.245 27.325);
|
||||||
--destructive-foreground: oklch(0.985 0 0);
|
--destructive-foreground: oklch(0.985 0 0);
|
||||||
--ring: oklch(0.55 0.3 320);
|
--ring: oklch(0.55 0.3 320);
|
||||||
--sidebar: oklch(0.985 0 0);
|
--sidebar: oklch(0.985 0 0);
|
||||||
--sidebar-foreground: oklch(0.145 0 0);
|
--sidebar-foreground: oklch(0.145 0 0);
|
||||||
--sidebar-primary: oklch(0.205 0 0);
|
--sidebar-primary: oklch(0.205 0 0);
|
||||||
--sidebar-primary-foreground: oklch(0.985 0 0);
|
--sidebar-primary-foreground: oklch(0.985 0 0);
|
||||||
--sidebar-accent: oklch(0.97 0 0);
|
--sidebar-accent: oklch(0.97 0 0);
|
||||||
--sidebar-accent-foreground: oklch(0.205 0 0);
|
--sidebar-accent-foreground: oklch(0.205 0 0);
|
||||||
--sidebar-border: oklch(0.922 0 0);
|
--sidebar-border: oklch(0.922 0 0);
|
||||||
--sidebar-ring: oklch(0.708 0 0);
|
--sidebar-ring: oklch(0.708 0 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
.dark {
|
.dark {
|
||||||
--background: oklch(0.08 0.02 280);
|
--background: oklch(0.08 0.02 280);
|
||||||
--foreground: oklch(0.98 0.01 280);
|
--foreground: oklch(0.98 0.01 280);
|
||||||
--muted: oklch(0.12 0.03 280);
|
--muted: oklch(0.12 0.03 280);
|
||||||
--muted-foreground: oklch(0.6 0.02 280);
|
--muted-foreground: oklch(0.6 0.02 280);
|
||||||
--popover: oklch(0.205 0 0);
|
--popover: oklch(0.205 0 0);
|
||||||
--popover-foreground: oklch(0.985 0 0);
|
--popover-foreground: oklch(0.985 0 0);
|
||||||
--card: oklch(0.1 0.02 280);
|
--card: oklch(0.1 0.02 280);
|
||||||
--card-foreground: oklch(0.95 0.01 280);
|
--card-foreground: oklch(0.95 0.01 280);
|
||||||
--border: oklch(0.2 0.05 280);
|
--border: oklch(0.2 0.05 280);
|
||||||
--input: oklch(1 0 0 / 0.15);
|
--input: oklch(1 0 0 / 0.15);
|
||||||
--primary: oklch(0.65 0.25 320);
|
--primary: oklch(0.65 0.25 320);
|
||||||
--primary-foreground: oklch(0.98 0.01 320);
|
--primary-foreground: oklch(0.98 0.01 320);
|
||||||
--secondary: oklch(0.15 0.05 260);
|
--secondary: oklch(0.15 0.05 260);
|
||||||
--secondary-foreground: oklch(0.9 0.02 260);
|
--secondary-foreground: oklch(0.9 0.02 260);
|
||||||
--accent: oklch(0.55 0.3 280);
|
--accent: oklch(0.55 0.3 280);
|
||||||
--accent-foreground: oklch(0.98 0.01 280);
|
--accent-foreground: oklch(0.98 0.01 280);
|
||||||
--destructive: oklch(0.704 0.191 22.216);
|
--destructive: oklch(0.704 0.191 22.216);
|
||||||
--destructive-foreground: oklch(0.985 0 0);
|
--destructive-foreground: oklch(0.985 0 0);
|
||||||
--ring: oklch(0.65 0.25 320);
|
--ring: oklch(0.65 0.25 320);
|
||||||
--sidebar: oklch(0.205 0 0);
|
--sidebar: oklch(0.205 0 0);
|
||||||
--sidebar-foreground: oklch(0.985 0 0);
|
--sidebar-foreground: oklch(0.985 0 0);
|
||||||
--sidebar-primary: oklch(0.488 0.243 264.376);
|
--sidebar-primary: oklch(0.488 0.243 264.376);
|
||||||
--sidebar-primary-foreground: oklch(0.985 0 0);
|
--sidebar-primary-foreground: oklch(0.985 0 0);
|
||||||
--sidebar-accent: oklch(0.269 0 0);
|
--sidebar-accent: oklch(0.269 0 0);
|
||||||
--sidebar-accent-foreground: oklch(0.985 0 0);
|
--sidebar-accent-foreground: oklch(0.985 0 0);
|
||||||
--sidebar-border: oklch(1 0 0 / 0.1);
|
--sidebar-border: oklch(1 0 0 / 0.1);
|
||||||
--sidebar-ring: oklch(0.556 0 0);
|
--sidebar-ring: oklch(0.556 0 0);
|
||||||
}
|
}
|
||||||
|
|
||||||
@theme inline {
|
@theme inline {
|
||||||
--color-background: var(--background);
|
--color-background: var(--background);
|
||||||
--color-foreground: var(--foreground);
|
--color-foreground: var(--foreground);
|
||||||
--color-card: var(--card);
|
--color-card: var(--card);
|
||||||
--color-card-foreground: var(--card-foreground);
|
--color-card-foreground: var(--card-foreground);
|
||||||
--color-popover: var(--popover);
|
--color-popover: var(--popover);
|
||||||
--color-popover-foreground: var(--popover-foreground);
|
--color-popover-foreground: var(--popover-foreground);
|
||||||
--color-primary: var(--primary);
|
--color-primary: var(--primary);
|
||||||
--color-primary-foreground: var(--primary-foreground);
|
--color-primary-foreground: var(--primary-foreground);
|
||||||
--color-secondary: var(--secondary);
|
--color-secondary: var(--secondary);
|
||||||
--color-secondary-foreground: var(--secondary-foreground);
|
--color-secondary-foreground: var(--secondary-foreground);
|
||||||
--color-muted: var(--muted);
|
--color-muted: var(--muted);
|
||||||
--color-muted-foreground: var(--muted-foreground);
|
--color-muted-foreground: var(--muted-foreground);
|
||||||
--color-accent: var(--accent);
|
--color-accent: var(--accent);
|
||||||
--color-accent-foreground: var(--accent-foreground);
|
--color-accent-foreground: var(--accent-foreground);
|
||||||
--color-destructive: var(--destructive);
|
--color-destructive: var(--destructive);
|
||||||
--color-destructive-foreground: var(--destructive-foreground);
|
--color-destructive-foreground: var(--destructive-foreground);
|
||||||
--color-border: var(--border);
|
--color-border: var(--border);
|
||||||
--color-input: var(--input);
|
--color-input: var(--input);
|
||||||
--color-ring: var(--ring);
|
--color-ring: var(--ring);
|
||||||
--color-chart-1: var(--chart-1);
|
--color-chart-1: var(--chart-1);
|
||||||
--color-chart-2: var(--chart-2);
|
--color-chart-2: var(--chart-2);
|
||||||
--color-chart-3: var(--chart-3);
|
--color-chart-3: var(--chart-3);
|
||||||
--color-chart-4: var(--chart-4);
|
--color-chart-4: var(--chart-4);
|
||||||
--color-chart-5: var(--chart-5);
|
--color-chart-5: var(--chart-5);
|
||||||
--color-sidebar: var(--sidebar);
|
--color-sidebar: var(--sidebar);
|
||||||
--color-sidebar-foreground: var(--sidebar-foreground);
|
--color-sidebar-foreground: var(--sidebar-foreground);
|
||||||
--color-sidebar-primary: var(--sidebar-primary);
|
--color-sidebar-primary: var(--sidebar-primary);
|
||||||
--color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
|
--color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
|
||||||
--color-sidebar-accent: var(--sidebar-accent);
|
--color-sidebar-accent: var(--sidebar-accent);
|
||||||
--color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
|
--color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
|
||||||
--color-sidebar-border: var(--sidebar-border);
|
--color-sidebar-border: var(--sidebar-border);
|
||||||
--color-sidebar-ring: var(--sidebar-ring);
|
--color-sidebar-ring: var(--sidebar-ring);
|
||||||
|
|
||||||
--font-sans: var(--font-sans);
|
--font-sans: var(--font-sans);
|
||||||
--font-mono: var(--font-mono);
|
--font-mono: var(--font-mono);
|
||||||
--font-serif: var(--font-serif);
|
--font-serif: var(--font-serif);
|
||||||
}
|
}
|
||||||
|
|||||||
32
packages/frontend/src/app.d.ts
vendored
32
packages/frontend/src/app.d.ts
vendored
@@ -4,22 +4,22 @@ import type { AuthStatus } from "$lib/types";
|
|||||||
|
|
||||||
// for information about these interfaces
|
// for information about these interfaces
|
||||||
declare global {
|
declare global {
|
||||||
namespace App {
|
namespace App {
|
||||||
// interface Error {}
|
// interface Error {}
|
||||||
interface Locals {
|
interface Locals {
|
||||||
authStatus: AuthStatus;
|
authStatus: AuthStatus;
|
||||||
requestId: string;
|
requestId: string;
|
||||||
}
|
}
|
||||||
// interface PageData {}
|
// interface PageData {}
|
||||||
// interface PageState {}
|
// interface PageState {}
|
||||||
// interface Platform {}
|
// interface Platform {}
|
||||||
}
|
}
|
||||||
interface Window {
|
interface Window {
|
||||||
sidebar: {
|
sidebar: {
|
||||||
addPanel: () => void;
|
addPanel: () => void;
|
||||||
};
|
};
|
||||||
opera: object;
|
opera: object;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export {};
|
export {};
|
||||||
|
|||||||
@@ -1,24 +1,23 @@
|
|||||||
<!doctype html>
|
<!doctype html>
|
||||||
<html lang="en">
|
<html lang="en">
|
||||||
|
<head>
|
||||||
<head>
|
|
||||||
<meta charset="utf-8" />
|
<meta charset="utf-8" />
|
||||||
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
||||||
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
|
<link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png" />
|
||||||
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
|
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
|
||||||
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
|
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
|
||||||
<link rel="preconnect" href="https://fonts.googleapis.com">
|
<link rel="preconnect" href="https://fonts.googleapis.com" />
|
||||||
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
|
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
|
||||||
<link href="https://fonts.googleapis.com/css2?family=Dancing+Script:wght@400..700&family=Noto+Sans:ital,wght@0,100..900;1,100..900&display=swap" rel="stylesheet">
|
<link
|
||||||
|
href="https://fonts.googleapis.com/css2?family=Dancing+Script:wght@400..700&family=Noto+Sans:ital,wght@0,100..900;1,100..900&display=swap"
|
||||||
|
rel="stylesheet"
|
||||||
|
/>
|
||||||
|
|
||||||
<link rel="manifest" href="/site.webmanifest" />
|
<link rel="manifest" href="/site.webmanifest" />
|
||||||
%sveltekit.head%
|
%sveltekit.head%
|
||||||
</head>
|
</head>
|
||||||
|
|
||||||
<body data-sveltekit-preload-data="hover" class="dark">
|
<body data-sveltekit-preload-data="hover" class="dark">
|
||||||
<div style="display: contents">%sveltekit.body%</div>
|
<div style="display: contents">%sveltekit.body%</div>
|
||||||
</body>
|
</body>
|
||||||
|
|
||||||
</html>
|
</html>
|
||||||
@@ -2,96 +2,92 @@ import { isAuthenticated } from "$lib/services";
|
|||||||
import { logger, generateRequestId } from "$lib/logger";
|
import { logger, generateRequestId } from "$lib/logger";
|
||||||
import type { Handle } from "@sveltejs/kit";
|
import type { Handle } from "@sveltejs/kit";
|
||||||
|
|
||||||
// Log startup info once
|
// Log startup info once (module-level code runs exactly once on import)
|
||||||
let hasLoggedStartup = false;
|
logger.startup();
|
||||||
if (!hasLoggedStartup) {
|
|
||||||
logger.startup();
|
|
||||||
hasLoggedStartup = true;
|
|
||||||
}
|
|
||||||
|
|
||||||
export const handle: Handle = async ({ event, resolve }) => {
|
export const handle: Handle = async ({ event, resolve }) => {
|
||||||
const { cookies, locals, url, request } = event;
|
const { cookies, locals, url, request } = event;
|
||||||
const startTime = Date.now();
|
const startTime = Date.now();
|
||||||
|
|
||||||
// Generate unique request ID
|
// Generate unique request ID
|
||||||
const requestId = generateRequestId();
|
const requestId = generateRequestId();
|
||||||
|
|
||||||
// Add request ID to locals for access in other handlers
|
// Add request ID to locals for access in other handlers
|
||||||
locals.requestId = requestId;
|
locals.requestId = requestId;
|
||||||
|
|
||||||
// Log incoming request
|
// Log incoming request
|
||||||
logger.request(request.method, url.pathname, {
|
logger.request(request.method, url.pathname, {
|
||||||
requestId,
|
requestId,
|
||||||
context: {
|
context: {
|
||||||
userAgent: request.headers.get('user-agent')?.substring(0, 100),
|
userAgent: request.headers.get("user-agent")?.substring(0, 100),
|
||||||
referer: request.headers.get('referer'),
|
referer: request.headers.get("referer"),
|
||||||
ip: request.headers.get('x-forwarded-for') || request.headers.get('x-real-ip'),
|
ip: request.headers.get("x-forwarded-for") || request.headers.get("x-real-ip"),
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Handle authentication
|
// Handle authentication
|
||||||
const token = cookies.get("directus_session_token");
|
const token = cookies.get("session_token");
|
||||||
|
|
||||||
if (token) {
|
if (token) {
|
||||||
try {
|
try {
|
||||||
locals.authStatus = await isAuthenticated(token);
|
locals.authStatus = await isAuthenticated(token);
|
||||||
|
|
||||||
if (locals.authStatus.authenticated) {
|
if (locals.authStatus.authenticated) {
|
||||||
logger.auth('Token validated', true, {
|
logger.auth("Token validated", true, {
|
||||||
requestId,
|
requestId,
|
||||||
userId: locals.authStatus.user?.id,
|
userId: locals.authStatus.user?.id,
|
||||||
context: {
|
context: {
|
||||||
email: locals.authStatus.user?.email,
|
email: locals.authStatus.user?.email,
|
||||||
role: locals.authStatus.user?.role?.name,
|
role: locals.authStatus.user?.role,
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} else {
|
} else {
|
||||||
logger.auth('Token invalid', false, { requestId });
|
logger.auth("Token invalid", false, { requestId });
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.error('Authentication check failed', {
|
logger.error("Authentication check failed", {
|
||||||
requestId,
|
requestId,
|
||||||
error: error instanceof Error ? error : new Error(String(error)),
|
error: error instanceof Error ? error : new Error(String(error)),
|
||||||
});
|
});
|
||||||
locals.authStatus = { authenticated: false };
|
locals.authStatus = { authenticated: false };
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
logger.debug('No session token found', { requestId });
|
logger.debug("No session token found", { requestId });
|
||||||
locals.authStatus = { authenticated: false };
|
locals.authStatus = { authenticated: false };
|
||||||
}
|
}
|
||||||
|
|
||||||
// Resolve the request
|
// Resolve the request
|
||||||
let response: Response;
|
let response: Response;
|
||||||
try {
|
try {
|
||||||
response = await resolve(event, {
|
response = await resolve(event, {
|
||||||
filterSerializedResponseHeaders: (key) => {
|
filterSerializedResponseHeaders: (key) => {
|
||||||
return key.toLowerCase() === "content-type";
|
return key.toLowerCase() === "content-type";
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
const duration = Date.now() - startTime;
|
const duration = Date.now() - startTime;
|
||||||
logger.error('Request handler error', {
|
logger.error("Request handler error", {
|
||||||
requestId,
|
requestId,
|
||||||
method: request.method,
|
method: request.method,
|
||||||
path: url.pathname,
|
path: url.pathname,
|
||||||
duration,
|
duration,
|
||||||
error: error instanceof Error ? error : new Error(String(error)),
|
error: error instanceof Error ? error : new Error(String(error)),
|
||||||
});
|
});
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Log response
|
// Log response
|
||||||
const duration = Date.now() - startTime;
|
const duration = Date.now() - startTime;
|
||||||
logger.response(request.method, url.pathname, response.status, duration, {
|
logger.response(request.method, url.pathname, response.status, duration, {
|
||||||
requestId,
|
requestId,
|
||||||
userId: locals.authStatus.authenticated ? locals.authStatus.user?.id : undefined,
|
userId: locals.authStatus.authenticated ? locals.authStatus.user?.id : undefined,
|
||||||
context: {
|
context: {
|
||||||
cached: response.headers.get('x-sveltekit-page') === 'true',
|
cached: response.headers.get("x-sveltekit-page") === "true",
|
||||||
},
|
},
|
||||||
});
|
});
|
||||||
|
|
||||||
// Add request ID to response headers (useful for debugging)
|
// Add request ID to response headers (useful for debugging)
|
||||||
response.headers.set('x-request-id', requestId);
|
response.headers.set("x-request-id", requestId);
|
||||||
|
|
||||||
return response;
|
return response;
|
||||||
};
|
};
|
||||||
|
|||||||
25
packages/frontend/src/lib/api.ts
Normal file
25
packages/frontend/src/lib/api.ts
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import { GraphQLClient } from "graphql-request";
|
||||||
|
import { env } from "$env/dynamic/public";
|
||||||
|
import type { CurrentUser } from "./types";
|
||||||
|
|
||||||
|
export const apiUrl = env.PUBLIC_API_URL || "http://localhost:3000/api";
|
||||||
|
|
||||||
|
export const getGraphQLClient = (fetchFn?: typeof globalThis.fetch) =>
|
||||||
|
new GraphQLClient(`${apiUrl}/graphql`, {
|
||||||
|
credentials: "include",
|
||||||
|
fetch: fetchFn || globalThis.fetch,
|
||||||
|
});
|
||||||
|
|
||||||
|
export const getAssetUrl = (
|
||||||
|
id: string | null | undefined,
|
||||||
|
transform?: "mini" | "thumbnail" | "preview" | "medium" | "banner",
|
||||||
|
) => {
|
||||||
|
if (!id) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
return `${apiUrl}/assets/${id}${transform ? "?transform=" + transform : ""}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
export const isModel = (user: CurrentUser) => {
|
||||||
|
return user.role === "model";
|
||||||
|
};
|
||||||
@@ -1,77 +1,69 @@
|
|||||||
<script lang="ts">
|
<script lang="ts">
|
||||||
import { _ } from "svelte-i18n";
|
import { _ } from "svelte-i18n";
|
||||||
import {
|
import {
|
||||||
Dialog,
|
Dialog,
|
||||||
DialogContent,
|
DialogContent,
|
||||||
DialogDescription,
|
DialogDescription,
|
||||||
DialogHeader,
|
DialogHeader,
|
||||||
DialogTitle,
|
DialogTitle,
|
||||||
} from "$lib/components/ui/dialog";
|
} from "$lib/components/ui/dialog";
|
||||||
import { Button } from "$lib/components/ui/button";
|
import { Button } from "$lib/components/ui/button";
|
||||||
import { Separator } from "$lib/components/ui/separator";
|
import { Separator } from "$lib/components/ui/separator";
|
||||||
import { onMount } from "svelte";
|
import { onMount } from "svelte";
|
||||||
|
|
||||||
const AGE_VERIFICATION_KEY = "age-verified";
|
const AGE_VERIFICATION_KEY = "age-verified";
|
||||||
|
|
||||||
let isOpen = true;
|
let isOpen = $state(false);
|
||||||
|
|
||||||
function handleAgeConfirmation() {
|
function handleAgeConfirmation() {
|
||||||
localStorage.setItem(AGE_VERIFICATION_KEY, "true");
|
localStorage.setItem(AGE_VERIFICATION_KEY, "true");
|
||||||
isOpen = false;
|
isOpen = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
onMount(() => {
|
onMount(() => {
|
||||||
const storedVerification = localStorage.getItem(AGE_VERIFICATION_KEY);
|
if (localStorage.getItem(AGE_VERIFICATION_KEY) !== "true") {
|
||||||
if (storedVerification === "true") {
|
isOpen = true;
|
||||||
isOpen = false;
|
}
|
||||||
}
|
});
|
||||||
});
|
|
||||||
</script>
|
</script>
|
||||||
|
|
||||||
<Dialog bind:open={isOpen}>
|
<Dialog bind:open={isOpen}>
|
||||||
<DialogContent
|
<DialogContent
|
||||||
class="sm:max-w-md"
|
class="sm:max-w-md"
|
||||||
onInteractOutside={(e) => e.preventDefault()}
|
onInteractOutside={(e) => e.preventDefault()}
|
||||||
showCloseButton={false}
|
showCloseButton={false}
|
||||||
>
|
>
|
||||||
<DialogHeader class="space-y-4">
|
<DialogHeader class="space-y-4">
|
||||||
<div class="flex items-center justify-between">
|
<div class="flex items-center justify-between">
|
||||||
<div class="flex items-center gap-3">
|
<div class="flex items-center gap-3">
|
||||||
<div
|
<div
|
||||||
class="w-10 h-10 shrink-0 grow-0 rounded-full bg-gradient-to-br from-primary to-purple-600 flex items-center justify-center"
|
class="w-10 h-10 shrink-0 grow-0 rounded-full bg-gradient-to-br from-primary to-purple-600 flex items-center justify-center"
|
||||||
>
|
>
|
||||||
<span class="text-primary-foreground text-sm"
|
<span class="text-primary-foreground text-sm">{$_("age_verification_dialog.age")}</span>
|
||||||
>{$_("age_verification_dialog.age")}</span
|
</div>
|
||||||
>
|
<div class="">
|
||||||
</div>
|
<DialogTitle class="text-left text-xl font-semibold text-primary-foreground"
|
||||||
<div class="">
|
>{$_("age_verification_dialog.title")}</DialogTitle
|
||||||
<DialogTitle class="text-left text-xl font-semibold text-primary-foreground"
|
|
||||||
>{$_("age_verification_dialog.title")}</DialogTitle
|
|
||||||
>
|
|
||||||
<DialogDescription class="text-left text-sm">
|
|
||||||
{$_("age_verification_dialog.description")}
|
|
||||||
</DialogDescription>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</div>
|
|
||||||
</DialogHeader>
|
|
||||||
|
|
||||||
<Separator class="my-4" />
|
|
||||||
|
|
||||||
<!-- Close Button -->
|
|
||||||
<div class="flex justify-end gap-4">
|
|
||||||
<Button variant="destructive" href={$_("age_verification_dialog.exit_url")} size="sm">
|
|
||||||
{$_("age_verification_dialog.exit")}
|
|
||||||
</Button>
|
|
||||||
<Button
|
|
||||||
variant="default"
|
|
||||||
size="sm"
|
|
||||||
onclick={handleAgeConfirmation}
|
|
||||||
class="cursor-pointer"
|
|
||||||
>
|
>
|
||||||
<span class="icon-[ri--check-line]"></span>
|
<DialogDescription class="text-left text-sm">
|
||||||
{$_("age_verification_dialog.confirm")}
|
{$_("age_verification_dialog.description")}
|
||||||
</Button>
|
</DialogDescription>
|
||||||
|
</div>
|
||||||
</div>
|
</div>
|
||||||
</DialogContent>
|
</div>
|
||||||
|
</DialogHeader>
|
||||||
|
|
||||||
|
<Separator class="my-4" />
|
||||||
|
|
||||||
|
<!-- Close Button -->
|
||||||
|
<div class="flex justify-end gap-4">
|
||||||
|
<Button variant="destructive" href={$_("age_verification_dialog.exit_url")} size="sm">
|
||||||
|
{$_("age_verification_dialog.exit")}
|
||||||
|
</Button>
|
||||||
|
<Button variant="default" size="sm" onclick={handleAgeConfirmation} class="cursor-pointer">
|
||||||
|
<span class="icon-[ri--check-line]"></span>
|
||||||
|
{$_("age_verification_dialog.confirm")}
|
||||||
|
</Button>
|
||||||
|
</div>
|
||||||
|
</DialogContent>
|
||||||
</Dialog>
|
</Dialog>
|
||||||
|
|||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user