diff --git a/Dockerfile b/Dockerfile index 8de053a..448467b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -64,9 +64,6 @@ RUN pnpm --filter @sexy.pivoine.art/buttplug build # 3. Build frontend RUN pnpm --filter @sexy.pivoine.art/frontend build -# 4. Build Directus bundle -RUN pnpm --filter @sexy.pivoine.art/bundle build - # Prune dev dependencies for production RUN pnpm install -rP @@ -78,7 +75,6 @@ FROM node:22.11.0-slim AS runner # Install dumb-init for proper signal handling RUN apt-get update && apt-get install -y \ dumb-init \ - ffmpeg \ && rm -rf /var/lib/apt/lists/* # Create non-root user @@ -96,18 +92,13 @@ COPY --from=builder --chown=node:node /app/pnpm-lock.yaml ./pnpm-lock.yaml COPY --from=builder --chown=node:node /app/pnpm-workspace.yaml ./pnpm-workspace.yaml # Create package directories -RUN mkdir -p packages/frontend packages/bundle packages/buttplug +RUN mkdir -p packages/frontend packages/buttplug # Copy frontend artifacts COPY --from=builder --chown=node:node /app/packages/frontend/build ./packages/frontend/build COPY --from=builder --chown=node:node /app/packages/frontend/node_modules ./packages/frontend/node_modules COPY --from=builder --chown=node:node /app/packages/frontend/package.json ./packages/frontend/package.json -# Copy bundle artifacts -COPY --from=builder --chown=node:node /app/packages/bundle/dist ./packages/bundle/dist -COPY --from=builder --chown=node:node /app/packages/bundle/node_modules ./packages/bundle/node_modules -COPY --from=builder --chown=node:node /app/packages/bundle/package.json ./packages/bundle/package.json - # Copy buttplug artifacts COPY --from=builder --chown=node:node /app/packages/buttplug/dist ./packages/buttplug/dist COPY --from=builder --chown=node:node /app/packages/buttplug/node_modules ./packages/buttplug/node_modules diff --git a/Dockerfile.backend b/Dockerfile.backend new file mode 100644 index 0000000..00c535e --- /dev/null +++ b/Dockerfile.backend @@ -0,0 +1,59 @@ +# syntax=docker/dockerfile:1 + +# 
============================================================================ +# Builder stage +# ============================================================================ +FROM node:22.11.0-slim AS builder + +RUN npm install -g corepack@latest && corepack enable + +WORKDIR /app + +COPY pnpm-workspace.yaml package.json pnpm-lock.yaml ./ +COPY packages/backend/package.json ./packages/backend/package.json + +RUN pnpm install --frozen-lockfile --filter @sexy.pivoine.art/backend + +COPY packages/backend ./packages/backend + +RUN pnpm --filter @sexy.pivoine.art/backend build + +RUN pnpm install -rP --filter @sexy.pivoine.art/backend + +# ============================================================================ +# Runner stage +# ============================================================================ +FROM node:22.11.0-slim AS runner + +RUN apt-get update && apt-get install -y \ + dumb-init \ + ffmpeg \ + wget \ + && rm -rf /var/lib/apt/lists/* + +RUN userdel -r node && \ + groupadd -r -g 1000 node && \ + useradd -r -u 1000 -g node -m -d /home/node -s /bin/bash node + +WORKDIR /home/node/app + +RUN mkdir -p packages/backend + +COPY --from=builder --chown=node:node /app/packages/backend/dist ./packages/backend/dist +COPY --from=builder --chown=node:node /app/packages/backend/node_modules ./packages/backend/node_modules +COPY --from=builder --chown=node:node /app/packages/backend/package.json ./packages/backend/package.json + +RUN mkdir -p /data/uploads && chown node:node /data/uploads + +USER node + +ENV NODE_ENV=production \ + PORT=4000 + +EXPOSE 4000 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=20s --retries=3 \ + CMD wget --no-verbose --tries=1 --spider http://localhost:4000/health + +ENTRYPOINT ["dumb-init", "--"] +CMD ["node", "packages/backend/dist/index.js"] diff --git a/compose.yml b/compose.yml index 35206da..29c64af 100644 --- a/compose.yml +++ b/compose.yml @@ -27,54 +27,59 @@ services: interval: 10s timeout: 5s retries: 5 - directus: - 
image: directus/directus:11 - container_name: sexy_directus + backend: + build: + context: . + dockerfile: Dockerfile.backend + container_name: sexy_backend restart: unless-stopped ports: - - "8055:8055" + - "4000:4000" volumes: - - directus_uploads:/directus/uploads - - ./packages/bundle:/directus/extensions/sexy.pivoine.art + - uploads_data:/data/uploads environment: - DB_CLIENT: pg - DB_HOST: sexy_postgres - DB_PORT: 5432 - DB_DATABASE: sexy - DB_USER: sexy - DB_PASSWORD: sexy - ADMIN_EMAIL: admin@sexy - ADMIN_PASSWORD: admin - PUBLIC_URL: http://localhost:3000/api - CACHE_ENABLED: true - CACHE_AUTO_PURGE: true - CACHE_STORE: redis - REDIS: redis://sexy_redis:6379 - CORS_ENABLED: true + DATABASE_URL: postgresql://sexy:sexy@sexy_postgres:5432/sexy + REDIS_URL: redis://sexy_redis:6379 + UPLOAD_DIR: /data/uploads CORS_ORIGIN: http://localhost:3000 - SESSION_COOKIE_SECURE: false - SESSION_COOKIE_SAME_SITE: lax - SESSION_COOKIE_DOMAIN: localhost - EXTENSIONS_PATH: /directus/extensions - EXTENSIONS_AUTO_RELOAD: true - WEBSOCKETS_ENABLED: true - USER_REGISTER_URL_ALLOW_LIST: http://localhost:3000 - PASSWORD_RESET_URL_ALLOW_LIST: http://localhost:3000 - TZ: Europe/Amsterdam + PORT: 4000 + NODE_ENV: production + COOKIE_SECRET: change-me-in-production + SMTP_HOST: localhost + SMTP_PORT: 587 + EMAIL_FROM: noreply@sexy.pivoine.art + PUBLIC_URL: http://localhost:3000 depends_on: postgres: condition: service_healthy redis: condition: service_healthy healthcheck: - test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:8055/server/health"] + test: ["CMD", "wget", "--no-verbose", "--tries=1", "--spider", "http://localhost:4000/health"] interval: 30s timeout: 10s retries: 3 - start_period: 40s + start_period: 20s + frontend: + build: + context: . 
+ dockerfile: Dockerfile + container_name: sexy_frontend + restart: unless-stopped + ports: + - "3000:3000" + environment: + NODE_ENV: production + PORT: 3000 + HOST: 0.0.0.0 + PUBLIC_API_URL: http://sexy_backend:4000 + PUBLIC_URL: http://localhost:3000 + depends_on: + backend: + condition: service_healthy volumes: - directus_uploads: + uploads_data: driver: local postgres_data: driver: local diff --git a/package.json b/package.json index d7472d9..0132799 100644 --- a/package.json +++ b/package.json @@ -5,11 +5,11 @@ "main": "index.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1", - "build:bundle": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/bundle build", "build:frontend": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/frontend build", - "dev": "pnpm build:bundle && docker compose up -d && pnpm --filter @sexy.pivoine.art/frontend dev", - "schema:export": "docker compose exec directus node /directus/cli.js schema snapshot --yes /tmp/snapshot.yml && docker compose cp directus:/tmp/snapshot.yml ./directus.yml && docker compose exec db pg_dump -U sexy --schema-only sexy > schema.sql", - "schema:import": "docker compose exec -T postgres psql -U sexy sexy < schema.sql && docker compose cp ./directus.yml directus:/tmp/snapshot.yml && docker compose exec directus node /directus/cli.js schema apply --yes /tmp/snapshot.yml" + "build:backend": "git pull && pnpm install && pnpm --filter @sexy.pivoine.art/backend build", + "dev:data": "docker compose up -d postgres redis", + "dev:backend": "pnpm --filter @sexy.pivoine.art/backend dev", + "dev": "pnpm dev:data && pnpm dev:backend & pnpm --filter @sexy.pivoine.art/frontend dev" }, "keywords": [], "author": { diff --git a/packages/backend/drizzle.config.ts b/packages/backend/drizzle.config.ts new file mode 100644 index 0000000..89cd0fc --- /dev/null +++ b/packages/backend/drizzle.config.ts @@ -0,0 +1,10 @@ +import { defineConfig } from "drizzle-kit"; + +export default 
defineConfig({ + schema: "./src/db/schema/index.ts", + out: "./src/migrations", + dialect: "postgresql", + dbCredentials: { + url: process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy", + }, +}); diff --git a/packages/backend/package.json b/packages/backend/package.json new file mode 100644 index 0000000..aa9bb47 --- /dev/null +++ b/packages/backend/package.json @@ -0,0 +1,51 @@ +{ + "name": "@sexy.pivoine.art/backend", + "version": "1.0.0", + "type": "module", + "private": true, + "scripts": { + "dev": "tsx watch src/index.ts", + "build": "tsc", + "start": "node dist/index.js", + "db:generate": "drizzle-kit generate", + "db:migrate": "drizzle-kit migrate", + "db:studio": "drizzle-kit studio", + "migrate": "tsx src/scripts/data-migration.ts" + }, + "dependencies": { + "@fastify/cookie": "^11.0.2", + "@fastify/cors": "^10.0.2", + "@fastify/multipart": "^9.0.3", + "@fastify/static": "^8.1.1", + "@pothos/core": "^4.4.0", + "@pothos/plugin-errors": "^4.2.0", + "argon2": "^0.43.0", + "drizzle-orm": "^0.44.1", + "fastify": "^5.4.0", + "fluent-ffmpeg": "^2.1.3", + "graphql": "^16.11.0", + "graphql-scalars": "^1.24.2", + "graphql-ws": "^6.0.4", + "graphql-yoga": "^5.13.4", + "ioredis": "^5.6.1", + "nanoid": "^5.1.5", + "nodemailer": "^7.0.3", + "pg": "^8.16.0", + "slugify": "^1.6.6", + "uuid": "^11.1.0" + }, + "pnpm": { + "onlyBuiltDependencies": [ + "argon2" + ] + }, + "devDependencies": { + "@types/fluent-ffmpeg": "^2.1.27", + "@types/nodemailer": "^6.4.17", + "@types/pg": "^8.15.4", + "@types/uuid": "^10.0.0", + "drizzle-kit": "^0.31.1", + "tsx": "^4.19.4", + "typescript": "^5.9.3" + } +} diff --git a/packages/backend/src/db/connection.ts b/packages/backend/src/db/connection.ts new file mode 100644 index 0000000..c36eea7 --- /dev/null +++ b/packages/backend/src/db/connection.ts @@ -0,0 +1,11 @@ +import { drizzle } from "drizzle-orm/node-postgres"; +import { Pool } from "pg"; +import * as schema from "./schema/index.js"; + +const pool = new Pool({ + 
connectionString: process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy", + max: 20, +}); + +export const db = drizzle(pool, { schema }); +export type DB = typeof db; diff --git a/packages/backend/src/db/schema/articles.ts b/packages/backend/src/db/schema/articles.ts new file mode 100644 index 0000000..8a27541 --- /dev/null +++ b/packages/backend/src/db/schema/articles.ts @@ -0,0 +1,37 @@ +import { + pgTable, + text, + timestamp, + boolean, + index, + uniqueIndex, +} from "drizzle-orm/pg-core"; +import { users } from "./users.js"; +import { files } from "./files.js"; + +export const articles = pgTable( + "articles", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + slug: text("slug").notNull(), + title: text("title").notNull(), + excerpt: text("excerpt"), + content: text("content"), + image: text("image").references(() => files.id, { onDelete: "set null" }), + tags: text("tags").array().default([]), + publish_date: timestamp("publish_date").notNull().defaultNow(), + author: text("author").references(() => users.id, { onDelete: "set null" }), + category: text("category"), + featured: boolean("featured").default(false), + date_created: timestamp("date_created").notNull().defaultNow(), + date_updated: timestamp("date_updated"), + }, + (t) => [ + uniqueIndex("articles_slug_idx").on(t.slug), + index("articles_publish_date_idx").on(t.publish_date), + index("articles_featured_idx").on(t.featured), + ], +); + +export type Article = typeof articles.$inferSelect; +export type NewArticle = typeof articles.$inferInsert; diff --git a/packages/backend/src/db/schema/comments.ts b/packages/backend/src/db/schema/comments.ts new file mode 100644 index 0000000..15fea59 --- /dev/null +++ b/packages/backend/src/db/schema/comments.ts @@ -0,0 +1,30 @@ +import { + pgTable, + text, + timestamp, + index, + integer, +} from "drizzle-orm/pg-core"; +import { users } from "./users.js"; + +export const comments = pgTable( + "comments", + { + id: 
integer("id").primaryKey().generatedAlwaysAsIdentity(), + collection: text("collection").notNull(), // 'videos' | 'recordings' + item_id: text("item_id").notNull(), + comment: text("comment").notNull(), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + date_created: timestamp("date_created").notNull().defaultNow(), + date_updated: timestamp("date_updated"), + }, + (t) => [ + index("comments_collection_item_idx").on(t.collection, t.item_id), + index("comments_user_idx").on(t.user_id), + ], +); + +export type Comment = typeof comments.$inferSelect; +export type NewComment = typeof comments.$inferInsert; diff --git a/packages/backend/src/db/schema/files.ts b/packages/backend/src/db/schema/files.ts new file mode 100644 index 0000000..7c2c2a9 --- /dev/null +++ b/packages/backend/src/db/schema/files.ts @@ -0,0 +1,27 @@ +import { + pgTable, + text, + timestamp, + bigint, + integer, + index, +} from "drizzle-orm/pg-core"; + +export const files = pgTable( + "files", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + title: text("title"), + description: text("description"), + filename: text("filename").notNull(), + mime_type: text("mime_type"), + filesize: bigint("filesize", { mode: "number" }), + duration: integer("duration"), + uploaded_by: text("uploaded_by"), + date_created: timestamp("date_created").notNull().defaultNow(), + }, + (t) => [index("files_uploaded_by_idx").on(t.uploaded_by)], +); + +export type File = typeof files.$inferSelect; +export type NewFile = typeof files.$inferInsert; diff --git a/packages/backend/src/db/schema/gamification.ts b/packages/backend/src/db/schema/gamification.ts new file mode 100644 index 0000000..6af069e --- /dev/null +++ b/packages/backend/src/db/schema/gamification.ts @@ -0,0 +1,94 @@ +import { + pgTable, + text, + timestamp, + integer, + real, + index, + pgEnum, + uniqueIndex, +} from "drizzle-orm/pg-core"; +import { users } from "./users.js"; +import { 
recordings } from "./recordings.js"; + +export const achievementStatusEnum = pgEnum("achievement_status", [ + "draft", + "published", +]); + +export const achievements = pgTable( + "achievements", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + code: text("code").notNull(), + name: text("name").notNull(), + description: text("description"), + icon: text("icon"), + category: text("category"), + required_count: integer("required_count").notNull().default(1), + points_reward: integer("points_reward").notNull().default(0), + status: achievementStatusEnum("status").notNull().default("published"), + sort: integer("sort").default(0), + }, + (t) => [uniqueIndex("achievements_code_idx").on(t.code)], +); + +export const user_achievements = pgTable( + "user_achievements", + { + id: integer("id").primaryKey().generatedAlwaysAsIdentity(), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + achievement_id: text("achievement_id") + .notNull() + .references(() => achievements.id, { onDelete: "cascade" }), + progress: integer("progress").default(0), + date_unlocked: timestamp("date_unlocked"), + }, + (t) => [ + index("user_achievements_user_idx").on(t.user_id), + uniqueIndex("user_achievements_unique_idx").on(t.user_id, t.achievement_id), + ], +); + +export const user_points = pgTable( + "user_points", + { + id: integer("id").primaryKey().generatedAlwaysAsIdentity(), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + action: text("action").notNull(), + points: integer("points").notNull(), + recording_id: text("recording_id").references(() => recordings.id, { + onDelete: "set null", + }), + date_created: timestamp("date_created").notNull().defaultNow(), + }, + (t) => [ + index("user_points_user_idx").on(t.user_id), + index("user_points_date_idx").on(t.date_created), + ], +); + +export const user_stats = pgTable( + "user_stats", + { + id: 
integer("id").primaryKey().generatedAlwaysAsIdentity(), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + total_raw_points: integer("total_raw_points").default(0), + total_weighted_points: real("total_weighted_points").default(0), + recordings_count: integer("recordings_count").default(0), + playbacks_count: integer("playbacks_count").default(0), + comments_count: integer("comments_count").default(0), + achievements_count: integer("achievements_count").default(0), + last_updated: timestamp("last_updated").defaultNow(), + }, + (t) => [uniqueIndex("user_stats_user_idx").on(t.user_id)], +); + +export type Achievement = typeof achievements.$inferSelect; +export type UserStats = typeof user_stats.$inferSelect; diff --git a/packages/backend/src/db/schema/index.ts b/packages/backend/src/db/schema/index.ts new file mode 100644 index 0000000..86b99b5 --- /dev/null +++ b/packages/backend/src/db/schema/index.ts @@ -0,0 +1,7 @@ +export * from "./files.js"; +export * from "./users.js"; +export * from "./videos.js"; +export * from "./articles.js"; +export * from "./recordings.js"; +export * from "./comments.js"; +export * from "./gamification.js"; diff --git a/packages/backend/src/db/schema/recordings.ts b/packages/backend/src/db/schema/recordings.ts new file mode 100644 index 0000000..245c7af --- /dev/null +++ b/packages/backend/src/db/schema/recordings.ts @@ -0,0 +1,73 @@ +import { + pgTable, + text, + timestamp, + boolean, + integer, + pgEnum, + index, + uniqueIndex, + jsonb, +} from "drizzle-orm/pg-core"; +import { users } from "./users.js"; +import { videos } from "./videos.js"; + +export const recordingStatusEnum = pgEnum("recording_status", [ + "draft", + "published", + "archived", +]); + +export const recordings = pgTable( + "recordings", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + title: text("title").notNull(), + description: text("description"), + slug: text("slug").notNull(), + duration: 
integer("duration").notNull(), + events: jsonb("events").$type().default([]), + device_info: jsonb("device_info").$type().default([]), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + status: recordingStatusEnum("status").notNull().default("draft"), + tags: text("tags").array().default([]), + linked_video: text("linked_video").references(() => videos.id, { + onDelete: "set null", + }), + featured: boolean("featured").default(false), + public: boolean("public").default(false), + original_recording_id: text("original_recording_id"), + date_created: timestamp("date_created").notNull().defaultNow(), + date_updated: timestamp("date_updated"), + }, + (t) => [ + uniqueIndex("recordings_slug_idx").on(t.slug), + index("recordings_user_idx").on(t.user_id), + index("recordings_status_idx").on(t.status), + index("recordings_public_idx").on(t.public), + ], +); + +export const recording_plays = pgTable( + "recording_plays", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + recording_id: text("recording_id") + .notNull() + .references(() => recordings.id, { onDelete: "cascade" }), + user_id: text("user_id").references(() => users.id, { onDelete: "set null" }), + duration_played: integer("duration_played").default(0), + completed: boolean("completed").default(false), + date_created: timestamp("date_created").notNull().defaultNow(), + date_updated: timestamp("date_updated"), + }, + (t) => [ + index("recording_plays_recording_idx").on(t.recording_id), + index("recording_plays_user_idx").on(t.user_id), + ], +); + +export type Recording = typeof recordings.$inferSelect; +export type NewRecording = typeof recordings.$inferInsert; diff --git a/packages/backend/src/db/schema/users.ts b/packages/backend/src/db/schema/users.ts new file mode 100644 index 0000000..e7e622a --- /dev/null +++ b/packages/backend/src/db/schema/users.ts @@ -0,0 +1,60 @@ +import { + pgTable, + text, + timestamp, + pgEnum, + boolean, + index, 
+ uniqueIndex, + integer, +} from "drizzle-orm/pg-core"; +import { files } from "./files.js"; + +export const roleEnum = pgEnum("user_role", ["model", "viewer", "admin"]); + +export const users = pgTable( + "users", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + email: text("email").notNull(), + password_hash: text("password_hash").notNull(), + first_name: text("first_name"), + last_name: text("last_name"), + artist_name: text("artist_name"), + slug: text("slug"), + description: text("description"), + tags: text("tags").array().default([]), + role: roleEnum("role").notNull().default("viewer"), + avatar: text("avatar").references(() => files.id, { onDelete: "set null" }), + banner: text("banner").references(() => files.id, { onDelete: "set null" }), + email_verified: boolean("email_verified").notNull().default(false), + email_verify_token: text("email_verify_token"), + password_reset_token: text("password_reset_token"), + password_reset_expiry: timestamp("password_reset_expiry"), + date_created: timestamp("date_created").notNull().defaultNow(), + date_updated: timestamp("date_updated"), + }, + (t) => [ + uniqueIndex("users_email_idx").on(t.email), + uniqueIndex("users_slug_idx").on(t.slug), + index("users_role_idx").on(t.role), + ], +); + +export const user_photos = pgTable( + "user_photos", + { + id: integer("id").primaryKey().generatedAlwaysAsIdentity(), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + file_id: text("file_id") + .notNull() + .references(() => files.id, { onDelete: "cascade" }), + sort: integer("sort").default(0), + }, + (t) => [index("user_photos_user_idx").on(t.user_id)], +); + +export type User = typeof users.$inferSelect; +export type NewUser = typeof users.$inferInsert; diff --git a/packages/backend/src/db/schema/videos.ts b/packages/backend/src/db/schema/videos.ts new file mode 100644 index 0000000..27756be --- /dev/null +++ b/packages/backend/src/db/schema/videos.ts 
@@ -0,0 +1,90 @@ +import { + pgTable, + text, + timestamp, + boolean, + integer, + index, + uniqueIndex, + primaryKey, +} from "drizzle-orm/pg-core"; +import { users } from "./users.js"; +import { files } from "./files.js"; + +export const videos = pgTable( + "videos", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + slug: text("slug").notNull(), + title: text("title").notNull(), + description: text("description"), + image: text("image").references(() => files.id, { onDelete: "set null" }), + movie: text("movie").references(() => files.id, { onDelete: "set null" }), + tags: text("tags").array().default([]), + upload_date: timestamp("upload_date").notNull().defaultNow(), + premium: boolean("premium").default(false), + featured: boolean("featured").default(false), + likes_count: integer("likes_count").default(0), + plays_count: integer("plays_count").default(0), + }, + (t) => [ + uniqueIndex("videos_slug_idx").on(t.slug), + index("videos_upload_date_idx").on(t.upload_date), + index("videos_featured_idx").on(t.featured), + ], +); + +export const video_models = pgTable( + "video_models", + { + video_id: text("video_id") + .notNull() + .references(() => videos.id, { onDelete: "cascade" }), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + }, + (t) => [primaryKey({ columns: [t.video_id, t.user_id] })], +); + +export const video_likes = pgTable( + "video_likes", + { + id: text("id").primaryKey().$defaultFn(() => crypto.randomUUID()), + video_id: text("video_id") + .notNull() + .references(() => videos.id, { onDelete: "cascade" }), + user_id: text("user_id") + .notNull() + .references(() => users.id, { onDelete: "cascade" }), + date_created: timestamp("date_created").notNull().defaultNow(), + }, + (t) => [ + index("video_likes_video_idx").on(t.video_id), + index("video_likes_user_idx").on(t.user_id), + ], +); + +export const video_plays = pgTable( + "video_plays", + { + id: 
text("id").primaryKey().$defaultFn(() => crypto.randomUUID()),
+    video_id: text("video_id")
+      .notNull()
+      .references(() => videos.id, { onDelete: "cascade" }),
+    user_id: text("user_id").references(() => users.id, { onDelete: "set null" }),
+    session_id: text("session_id"),
+    duration_watched: integer("duration_watched"),
+    completed: boolean("completed").default(false),
+    date_created: timestamp("date_created").notNull().defaultNow(),
+    date_updated: timestamp("date_updated"),
+  },
+  (t) => [
+    index("video_plays_video_idx").on(t.video_id),
+    index("video_plays_user_idx").on(t.user_id),
+    index("video_plays_date_idx").on(t.date_created),
+  ],
+);
+
+export type Video = typeof videos.$inferSelect;
+export type NewVideo = typeof videos.$inferInsert;
diff --git a/packages/backend/src/graphql/builder.ts b/packages/backend/src/graphql/builder.ts
new file mode 100644
index 0000000..7fdc0ac
--- /dev/null
+++ b/packages/backend/src/graphql/builder.ts
@@ -0,0 +1,30 @@
+import SchemaBuilder from "@pothos/core";
+import ErrorsPlugin from "@pothos/plugin-errors";
+import type { DB } from "../db/connection.js";
+import type { SessionUser } from "../lib/auth.js";
+import type Redis from "ioredis";
+import { GraphQLDateTime, GraphQLJSON } from "graphql-scalars";
+
+export type Context = {
+  db: DB;
+  redis: InstanceType<typeof Redis>;
+  currentUser: SessionUser | null;
+  request: Request;
+  reply: unknown;
+};
+
+export const builder = new SchemaBuilder<{
+  Context: Context;
+  Scalars: {
+    DateTime: { Input: Date; Output: Date };
+    JSON: { Input: unknown; Output: unknown };
+  };
+}>({
+  plugins: [ErrorsPlugin],
+});
+
+builder.addScalarType("DateTime", GraphQLDateTime, {});
+builder.addScalarType("JSON", GraphQLJSON, {});
+
+builder.queryType({});
+builder.mutationType({});
diff --git a/packages/backend/src/graphql/context.ts b/packages/backend/src/graphql/context.ts
new file mode 100644
index 0000000..cb5b049
--- /dev/null
+++ b/packages/backend/src/graphql/context.ts
@@ -0,0 +1,29 @@
+import type { YogaInitialContext } from "graphql-yoga";
+import type { Context } from "./builder.js";
+import { getSession } from "../lib/auth.js";
+import { db } from "../db/connection.js";
+import { redis } from "../lib/auth.js";
+
+export async function buildContext(ctx: YogaInitialContext & { request: Request; reply: unknown; db: typeof db; redis: typeof redis }): Promise<Context> {
+  const request = ctx.request;
+  const cookieHeader = request.headers.get("cookie") || "";
+
+  // Parse session_token from cookies
+  const cookies = Object.fromEntries(
+    cookieHeader.split(";").map((c) => {
+      const [k, ...v] = c.trim().split("=");
+      return [k.trim(), v.join("=")];
+    }),
+  );
+
+  const token = cookies["session_token"];
+  const currentUser = token ? await getSession(token) : null;
+
+  return {
+    db: ctx.db || db,
+    redis: ctx.redis || redis,
+    currentUser,
+    request,
+    reply: ctx.reply,
+  };
+}
diff --git a/packages/backend/src/graphql/index.ts b/packages/backend/src/graphql/index.ts
new file mode 100644
index 0000000..41a58a3
--- /dev/null
+++ b/packages/backend/src/graphql/index.ts
@@ -0,0 +1,14 @@
+import "./builder.js";
+import "./types/index.js";
+import "./resolvers/auth.js";
+import "./resolvers/users.js";
+import "./resolvers/videos.js";
+import "./resolvers/models.js";
+import "./resolvers/articles.js";
+import "./resolvers/recordings.js";
+import "./resolvers/comments.js";
+import "./resolvers/gamification.js";
+import "./resolvers/stats.js";
+import { builder } from "./builder.js";
+
+export const schema = builder.toSchema();
diff --git a/packages/backend/src/graphql/resolvers/articles.ts b/packages/backend/src/graphql/resolvers/articles.ts
new file mode 100644
index 0000000..0c9c487
--- /dev/null
+++ b/packages/backend/src/graphql/resolvers/articles.ts
@@ -0,0 +1,83 @@
+import { builder } from "../builder.js";
+import { ArticleType } from "../types/index.js";
+import { articles, users } from "../../db/schema/index.js";
+import { eq, and, lte, desc } from
"drizzle-orm"; + +builder.queryField("articles", (t) => + t.field({ + type: [ArticleType], + args: { + featured: t.arg.boolean(), + limit: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + let query = ctx.db + .select() + .from(articles) + .where(lte(articles.publish_date, new Date())) + .orderBy(desc(articles.publish_date)); + + if (args.limit) { + query = (query as any).limit(args.limit); + } + + const articleList = await query; + + return Promise.all( + articleList.map(async (article: any) => { + let author = null; + if (article.author) { + const authorUser = await ctx.db + .select({ + first_name: users.first_name, + last_name: users.last_name, + avatar: users.avatar, + description: users.description, + }) + .from(users) + .where(eq(users.id, article.author)) + .limit(1); + author = authorUser[0] || null; + } + return { ...article, author }; + }), + ); + }, + }), +); + +builder.queryField("article", (t) => + t.field({ + type: ArticleType, + nullable: true, + args: { + slug: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const article = await ctx.db + .select() + .from(articles) + .where(and(eq(articles.slug, args.slug), lte(articles.publish_date, new Date()))) + .limit(1); + + if (!article[0]) return null; + + let author = null; + if (article[0].author) { + const authorUser = await ctx.db + .select({ + first_name: users.first_name, + last_name: users.last_name, + avatar: users.avatar, + description: users.description, + }) + .from(users) + .where(eq(users.id, article[0].author)) + .limit(1); + author = authorUser[0] || null; + } + + return { ...article[0], author }; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/auth.ts b/packages/backend/src/graphql/resolvers/auth.ts new file mode 100644 index 0000000..6fc8169 --- /dev/null +++ b/packages/backend/src/graphql/resolvers/auth.ts @@ -0,0 +1,226 @@ +import { GraphQLError } from "graphql"; +import { builder } from "../builder.js"; +import { 
CurrentUserType } from "../types/index.js"; +import { users } from "../../db/schema/index.js"; +import { eq } from "drizzle-orm"; +import { hash, verify as verifyArgon } from "../../lib/argon.js"; +import { setSession, deleteSession } from "../../lib/auth.js"; +import { sendVerification, sendPasswordReset } from "../../lib/email.js"; +import { slugify } from "../../lib/slugify.js"; +import { nanoid } from "nanoid"; + +builder.mutationField("login", (t) => + t.field({ + type: CurrentUserType, + args: { + email: t.arg.string({ required: true }), + password: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const user = await ctx.db + .select() + .from(users) + .where(eq(users.email, args.email.toLowerCase())) + .limit(1); + + if (!user[0]) throw new GraphQLError("Invalid credentials"); + + const valid = await verifyArgon(user[0].password_hash, args.password); + if (!valid) throw new GraphQLError("Invalid credentials"); + + const token = nanoid(32); + const sessionUser = { + id: user[0].id, + email: user[0].email, + role: user[0].role, + first_name: user[0].first_name, + last_name: user[0].last_name, + artist_name: user[0].artist_name, + slug: user[0].slug, + avatar: user[0].avatar, + }; + + await setSession(token, sessionUser); + + // Set session cookie + const isProduction = process.env.NODE_ENV === "production"; + const cookieValue = `session_token=${token}; HttpOnly; Path=/; SameSite=Lax; Max-Age=86400${isProduction ? 
"; Secure" : ""}`; + (ctx.reply as any).header?.("Set-Cookie", cookieValue); + + // For graphql-yoga response + if ((ctx as any).serverResponse) { + (ctx as any).serverResponse.setHeader("Set-Cookie", cookieValue); + } + + return user[0]; + }, + }), +); + +builder.mutationField("logout", (t) => + t.field({ + type: "Boolean", + resolve: async (_root, _args, ctx) => { + const cookieHeader = ctx.request.headers.get("cookie") || ""; + const cookies = Object.fromEntries( + cookieHeader.split(";").map((c) => { + const [k, ...v] = c.trim().split("="); + return [k.trim(), v.join("=")]; + }), + ); + const token = cookies["session_token"]; + if (token) { + await deleteSession(token); + } + // Clear cookie + const cookieValue = "session_token=; HttpOnly; Path=/; Max-Age=0"; + (ctx.reply as any).header?.("Set-Cookie", cookieValue); + return true; + }, + }), +); + +builder.mutationField("register", (t) => + t.field({ + type: "Boolean", + args: { + email: t.arg.string({ required: true }), + password: t.arg.string({ required: true }), + firstName: t.arg.string({ required: true }), + lastName: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const existing = await ctx.db + .select({ id: users.id }) + .from(users) + .where(eq(users.email, args.email.toLowerCase())) + .limit(1); + + if (existing.length > 0) throw new GraphQLError("Email already registered"); + + const passwordHash = await hash(args.password); + const artistName = `${args.firstName} ${args.lastName}`; + const baseSlug = slugify(artistName); + const verifyToken = nanoid(32); + + // Ensure unique slug + let slug = baseSlug; + let attempt = 0; + while (true) { + const existing = await ctx.db + .select({ id: users.id }) + .from(users) + .where(eq(users.slug, slug)) + .limit(1); + if (existing.length === 0) break; + attempt++; + slug = `${baseSlug}-${attempt}`; + } + + await ctx.db.insert(users).values({ + email: args.email.toLowerCase(), + password_hash: passwordHash, + first_name: 
args.firstName, + last_name: args.lastName, + artist_name: artistName, + slug, + role: "viewer", + email_verify_token: verifyToken, + email_verified: false, + }); + + await sendVerification(args.email, verifyToken); + return true; + }, + }), +); + +builder.mutationField("verifyEmail", (t) => + t.field({ + type: "Boolean", + args: { + token: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const user = await ctx.db + .select() + .from(users) + .where(eq(users.email_verify_token, args.token)) + .limit(1); + + if (!user[0]) throw new GraphQLError("Invalid verification token"); + + await ctx.db + .update(users) + .set({ email_verified: true, email_verify_token: null }) + .where(eq(users.id, user[0].id)); + + return true; + }, + }), +); + +builder.mutationField("requestPasswordReset", (t) => + t.field({ + type: "Boolean", + args: { + email: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const user = await ctx.db + .select() + .from(users) + .where(eq(users.email, args.email.toLowerCase())) + .limit(1); + + // Always return true to prevent email enumeration + if (!user[0]) return true; + + const token = nanoid(32); + const expiry = new Date(Date.now() + 60 * 60 * 1000); // 1 hour + + await ctx.db + .update(users) + .set({ password_reset_token: token, password_reset_expiry: expiry }) + .where(eq(users.id, user[0].id)); + + await sendPasswordReset(args.email, token); + return true; + }, + }), +); + +builder.mutationField("resetPassword", (t) => + t.field({ + type: "Boolean", + args: { + token: t.arg.string({ required: true }), + newPassword: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const user = await ctx.db + .select() + .from(users) + .where(eq(users.password_reset_token, args.token)) + .limit(1); + + if (!user[0]) throw new GraphQLError("Invalid or expired reset token"); + if (user[0].password_reset_expiry && user[0].password_reset_expiry < new Date()) { + throw 
new GraphQLError("Reset token expired"); + } + + const passwordHash = await hash(args.newPassword); + + await ctx.db + .update(users) + .set({ + password_hash: passwordHash, + password_reset_token: null, + password_reset_expiry: null, + }) + .where(eq(users.id, user[0].id)); + + return true; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/comments.ts b/packages/backend/src/graphql/resolvers/comments.ts new file mode 100644 index 0000000..8d909c6 --- /dev/null +++ b/packages/backend/src/graphql/resolvers/comments.ts @@ -0,0 +1,68 @@ +import { GraphQLError } from "graphql"; +import { builder } from "../builder.js"; +import { CommentType } from "../types/index.js"; +import { comments, users } from "../../db/schema/index.js"; +import { eq, and, desc } from "drizzle-orm"; +import { awardPoints, checkAchievements } from "../../lib/gamification.js"; + +builder.queryField("commentsForVideo", (t) => + t.field({ + type: [CommentType], + args: { + videoId: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const commentList = await ctx.db + .select() + .from(comments) + .where(and(eq(comments.collection, "videos"), eq(comments.item_id, args.videoId))) + .orderBy(desc(comments.date_created)); + + return Promise.all( + commentList.map(async (c: any) => { + const user = await ctx.db + .select({ id: users.id, first_name: users.first_name, last_name: users.last_name, avatar: users.avatar }) + .from(users) + .where(eq(users.id, c.user_id)) + .limit(1); + return { ...c, user: user[0] || null }; + }), + ); + }, + }), +); + +builder.mutationField("createCommentForVideo", (t) => + t.field({ + type: CommentType, + args: { + videoId: t.arg.string({ required: true }), + comment: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const newComment = await ctx.db + .insert(comments) + .values({ + collection: "videos", + item_id: args.videoId, + 
comment: args.comment, + user_id: ctx.currentUser.id, + }) + .returning(); + + // Gamification + await awardPoints(ctx.db, ctx.currentUser.id, "COMMENT_CREATE"); + await checkAchievements(ctx.db, ctx.currentUser.id, "social"); + + const user = await ctx.db + .select({ id: users.id, first_name: users.first_name, last_name: users.last_name, avatar: users.avatar }) + .from(users) + .where(eq(users.id, ctx.currentUser.id)) + .limit(1); + + return { ...newComment[0], user: user[0] || null }; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/gamification.ts b/packages/backend/src/graphql/resolvers/gamification.ts new file mode 100644 index 0000000..c0da1fb --- /dev/null +++ b/packages/backend/src/graphql/resolvers/gamification.ts @@ -0,0 +1,115 @@ +import { builder } from "../builder.js"; +import { LeaderboardEntryType, UserGamificationType, AchievementType } from "../types/index.js"; +import { user_stats, users, user_achievements, achievements, user_points } from "../../db/schema/index.js"; +import { eq, desc, gt, count, isNotNull } from "drizzle-orm"; + +builder.queryField("leaderboard", (t) => + t.field({ + type: [LeaderboardEntryType], + args: { + limit: t.arg.int(), + offset: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + const limit = Math.min(args.limit || 100, 500); + const offset = args.offset || 0; + + const entries = await ctx.db + .select({ + user_id: user_stats.user_id, + display_name: users.artist_name, + avatar: users.avatar, + total_weighted_points: user_stats.total_weighted_points, + total_raw_points: user_stats.total_raw_points, + recordings_count: user_stats.recordings_count, + playbacks_count: user_stats.playbacks_count, + achievements_count: user_stats.achievements_count, + }) + .from(user_stats) + .leftJoin(users, eq(user_stats.user_id, users.id)) + .orderBy(desc(user_stats.total_weighted_points)) + .limit(limit) + .offset(offset); + + return entries.map((e: any, i: number) => ({ ...e, rank: offset + i + 1 })); + }, + 
}), +); + +builder.queryField("userGamification", (t) => + t.field({ + type: UserGamificationType, + nullable: true, + args: { + userId: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const stats = await ctx.db + .select() + .from(user_stats) + .where(eq(user_stats.user_id, args.userId)) + .limit(1); + + let rank = 1; + if (stats[0]) { + const rankResult = await ctx.db + .select({ count: count() }) + .from(user_stats) + .where(gt(user_stats.total_weighted_points, stats[0].total_weighted_points || 0)); + rank = (rankResult[0]?.count || 0) + 1; + } + + const userAchievements = await ctx.db + .select({ + id: achievements.id, + code: achievements.code, + name: achievements.name, + description: achievements.description, + icon: achievements.icon, + category: achievements.category, + date_unlocked: user_achievements.date_unlocked, + progress: user_achievements.progress, + required_count: achievements.required_count, + }) + .from(user_achievements) + .leftJoin(achievements, eq(user_achievements.achievement_id, achievements.id)) + .where(eq(user_achievements.user_id, args.userId)) + .where(isNotNull(user_achievements.date_unlocked)) + .orderBy(desc(user_achievements.date_unlocked)); + + const recentPoints = await ctx.db + .select({ + action: user_points.action, + points: user_points.points, + date_created: user_points.date_created, + recording_id: user_points.recording_id, + }) + .from(user_points) + .where(eq(user_points.user_id, args.userId)) + .orderBy(desc(user_points.date_created)) + .limit(10); + + return { + stats: stats[0] ? 
{ ...stats[0], rank } : null, + achievements: userAchievements.map((a: any) => ({ + ...a, + date_unlocked: a.date_unlocked!, + })), + recent_points: recentPoints, + }; + }, + }), +); + +builder.queryField("achievements", (t) => + t.field({ + type: [AchievementType], + resolve: async (_root, _args, ctx) => { + return ctx.db + .select() + .from(achievements) + .where(eq(achievements.status, "published")) + .orderBy(achievements.sort); + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/models.ts b/packages/backend/src/graphql/resolvers/models.ts new file mode 100644 index 0000000..3589a0d --- /dev/null +++ b/packages/backend/src/graphql/resolvers/models.ts @@ -0,0 +1,63 @@ +import { builder } from "../builder.js"; +import { ModelType } from "../types/index.js"; +import { users, user_photos, files } from "../../db/schema/index.js"; +import { eq, and, desc } from "drizzle-orm"; + +async function enrichModel(db: any, user: any) { + // Fetch photos + const photoRows = await db + .select({ id: files.id, filename: files.filename }) + .from(user_photos) + .leftJoin(files, eq(user_photos.file_id, files.id)) + .where(eq(user_photos.user_id, user.id)) + .orderBy(user_photos.sort); + + return { + ...user, + photos: photoRows.map((p: any) => ({ id: p.id, filename: p.filename })), + }; +} + +builder.queryField("models", (t) => + t.field({ + type: [ModelType], + args: { + featured: t.arg.boolean(), + limit: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + let query = ctx.db + .select() + .from(users) + .where(eq(users.role, "model")) + .orderBy(desc(users.date_created)); + + if (args.limit) { + query = (query as any).limit(args.limit); + } + + const modelList = await query; + return Promise.all(modelList.map((m: any) => enrichModel(ctx.db, m))); + }, + }), +); + +builder.queryField("model", (t) => + t.field({ + type: ModelType, + nullable: true, + args: { + slug: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const 
model = await ctx.db + .select() + .from(users) + .where(and(eq(users.slug, args.slug), eq(users.role, "model"))) + .limit(1); + + if (!model[0]) return null; + return enrichModel(ctx.db, model[0]); + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/recordings.ts b/packages/backend/src/graphql/resolvers/recordings.ts new file mode 100644 index 0000000..074b270 --- /dev/null +++ b/packages/backend/src/graphql/resolvers/recordings.ts @@ -0,0 +1,333 @@ +import { GraphQLError } from "graphql"; +import { builder } from "../builder.js"; +import { RecordingType } from "../types/index.js"; +import { recordings, recording_plays } from "../../db/schema/index.js"; +import { eq, and, desc } from "drizzle-orm"; +import { slugify } from "../../lib/slugify.js"; +import { awardPoints, checkAchievements } from "../../lib/gamification.js"; + +builder.queryField("recordings", (t) => + t.field({ + type: [RecordingType], + args: { + status: t.arg.string(), + tags: t.arg.string(), + linkedVideoId: t.arg.string(), + limit: t.arg.int(), + page: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const conditions = [eq(recordings.user_id, ctx.currentUser.id)]; + if (args.status) conditions.push(eq(recordings.status, args.status as any)); + if (args.linkedVideoId) conditions.push(eq(recordings.linked_video, args.linkedVideoId)); + + const limit = args.limit || 50; + const page = args.page || 1; + const offset = (page - 1) * limit; + + return ctx.db + .select() + .from(recordings) + .where(and(...conditions)) + .orderBy(desc(recordings.date_created)) + .limit(limit) + .offset(offset); + }, + }), +); + +builder.queryField("recording", (t) => + t.field({ + type: RecordingType, + nullable: true, + args: { + id: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const recording = await ctx.db + .select() + 
.from(recordings) + .where(eq(recordings.id, args.id)) + .limit(1); + + if (!recording[0]) return null; + + if (recording[0].user_id !== ctx.currentUser.id && !recording[0].public) { + throw new GraphQLError("Forbidden"); + } + + return recording[0]; + }, + }), +); + +builder.queryField("communityRecordings", (t) => + t.field({ + type: [RecordingType], + args: { + limit: t.arg.int(), + offset: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + return ctx.db + .select() + .from(recordings) + .where(and(eq(recordings.status, "published"), eq(recordings.public, true))) + .orderBy(desc(recordings.date_created)) + .limit(args.limit || 50) + .offset(args.offset || 0); + }, + }), +); + +builder.mutationField("createRecording", (t) => + t.field({ + type: RecordingType, + args: { + title: t.arg.string({ required: true }), + description: t.arg.string(), + duration: t.arg.int({ required: true }), + events: t.arg({ type: "JSON", required: true }), + deviceInfo: t.arg({ type: "JSON", required: true }), + tags: t.arg.stringList(), + status: t.arg.string(), + linkedVideoId: t.arg.string(), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const slug = slugify(args.title); + + const newRecording = await ctx.db + .insert(recordings) + .values({ + title: args.title, + description: args.description || null, + slug, + duration: args.duration, + events: (args.events as object[]) || [], + device_info: (args.deviceInfo as object[]) || [], + user_id: ctx.currentUser.id, + tags: args.tags || [], + linked_video: args.linkedVideoId || null, + status: (args.status as any) || "draft", + public: false, + }) + .returning(); + + const recording = newRecording[0]; + + // Gamification: award points if published + if (recording.status === "published") { + await awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_CREATE", recording.id); + await checkAchievements(ctx.db, ctx.currentUser.id, "recordings"); + } + + return 
recording; + }, + }), +); + +builder.mutationField("updateRecording", (t) => + t.field({ + type: RecordingType, + nullable: true, + args: { + id: t.arg.string({ required: true }), + title: t.arg.string(), + description: t.arg.string(), + tags: t.arg.stringList(), + status: t.arg.string(), + public: t.arg.boolean(), + linkedVideoId: t.arg.string(), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const existing = await ctx.db + .select() + .from(recordings) + .where(eq(recordings.id, args.id)) + .limit(1); + + if (!existing[0]) throw new GraphQLError("Recording not found"); + if (existing[0].user_id !== ctx.currentUser.id) throw new GraphQLError("Forbidden"); + + const updates: Record = { date_updated: new Date() }; + if (args.title !== null && args.title !== undefined) { + updates.title = args.title; + updates.slug = slugify(args.title); + } + if (args.description !== null && args.description !== undefined) updates.description = args.description; + if (args.tags !== null && args.tags !== undefined) updates.tags = args.tags; + if (args.status !== null && args.status !== undefined) updates.status = args.status; + if (args.public !== null && args.public !== undefined) updates.public = args.public; + if (args.linkedVideoId !== null && args.linkedVideoId !== undefined) updates.linked_video = args.linkedVideoId; + + const updated = await ctx.db + .update(recordings) + .set(updates as any) + .where(eq(recordings.id, args.id)) + .returning(); + + const recording = updated[0]; + + // Gamification: if newly published + if (args.status === "published" && existing[0].status !== "published") { + await awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_CREATE", recording.id); + await checkAchievements(ctx.db, ctx.currentUser.id, "recordings"); + } + if (args.status === "published" && recording.featured && !existing[0].featured) { + await awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_FEATURED", recording.id); + 
await checkAchievements(ctx.db, ctx.currentUser.id, "recordings"); + } + + return recording; + }, + }), +); + +builder.mutationField("deleteRecording", (t) => + t.field({ + type: "Boolean", + args: { + id: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const existing = await ctx.db + .select() + .from(recordings) + .where(eq(recordings.id, args.id)) + .limit(1); + + if (!existing[0]) throw new GraphQLError("Recording not found"); + if (existing[0].user_id !== ctx.currentUser.id) throw new GraphQLError("Forbidden"); + + await ctx.db + .update(recordings) + .set({ status: "archived", date_updated: new Date() }) + .where(eq(recordings.id, args.id)); + + return true; + }, + }), +); + +builder.mutationField("duplicateRecording", (t) => + t.field({ + type: RecordingType, + args: { + id: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const original = await ctx.db + .select() + .from(recordings) + .where(eq(recordings.id, args.id)) + .limit(1); + + if (!original[0]) throw new GraphQLError("Recording not found"); + if (original[0].status !== "published" || !original[0].public) { + throw new GraphQLError("Recording is not publicly shared"); + } + + const slug = `${slugify(original[0].title)}-copy-${Date.now()}`; + + const duplicated = await ctx.db + .insert(recordings) + .values({ + title: `${original[0].title} (Copy)`, + description: original[0].description, + slug, + duration: original[0].duration, + events: original[0].events || [], + device_info: original[0].device_info || [], + user_id: ctx.currentUser.id, + tags: original[0].tags || [], + status: "draft", + public: false, + original_recording_id: original[0].id, + }) + .returning(); + + return duplicated[0]; + }, + }), +); + +builder.mutationField("recordRecordingPlay", (t) => + t.field({ + type: "JSON", + args: { + 
recordingId: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const recording = await ctx.db + .select() + .from(recordings) + .where(eq(recordings.id, args.recordingId)) + .limit(1); + + if (!recording[0]) throw new GraphQLError("Recording not found"); + + const play = await ctx.db + .insert(recording_plays) + .values({ + recording_id: args.recordingId, + user_id: ctx.currentUser?.id || null, + duration_played: 0, + completed: false, + }) + .returning({ id: recording_plays.id }); + + // Gamification + if (ctx.currentUser && recording[0].user_id !== ctx.currentUser.id) { + await awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_PLAY", args.recordingId); + await checkAchievements(ctx.db, ctx.currentUser.id, "playback"); + } + + return { success: true, play_id: play[0].id }; + }, + }), +); + +builder.mutationField("updateRecordingPlay", (t) => + t.field({ + type: "Boolean", + args: { + playId: t.arg.string({ required: true }), + durationPlayed: t.arg.int({ required: true }), + completed: t.arg.boolean({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const existing = await ctx.db + .select() + .from(recording_plays) + .where(eq(recording_plays.id, args.playId)) + .limit(1); + + if (!existing[0]) throw new GraphQLError("Play record not found"); + const wasCompleted = existing[0].completed; + + await ctx.db + .update(recording_plays) + .set({ duration_played: args.durationPlayed, completed: args.completed, date_updated: new Date() }) + .where(eq(recording_plays.id, args.playId)); + + if (args.completed && !wasCompleted && ctx.currentUser) { + await awardPoints(ctx.db, ctx.currentUser.id, "RECORDING_COMPLETE", existing[0].recording_id); + await checkAchievements(ctx.db, ctx.currentUser.id, "playback"); + } + + return true; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/stats.ts b/packages/backend/src/graphql/resolvers/stats.ts new file mode 100644 index 0000000..d3c652e --- /dev/null +++ 
b/packages/backend/src/graphql/resolvers/stats.ts @@ -0,0 +1,29 @@ +import { builder } from "../builder.js"; +import { StatsType } from "../types/index.js"; +import { users, videos } from "../../db/schema/index.js"; +import { eq, count } from "drizzle-orm"; + +builder.queryField("stats", (t) => + t.field({ + type: StatsType, + resolve: async (_root, _args, ctx) => { + const modelsCount = await ctx.db + .select({ count: count() }) + .from(users) + .where(eq(users.role, "model")); + const viewersCount = await ctx.db + .select({ count: count() }) + .from(users) + .where(eq(users.role, "viewer")); + const videosCount = await ctx.db + .select({ count: count() }) + .from(videos); + + return { + models_count: modelsCount[0]?.count || 0, + viewers_count: viewersCount[0]?.count || 0, + videos_count: videosCount[0]?.count || 0, + }; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/users.ts b/packages/backend/src/graphql/resolvers/users.ts new file mode 100644 index 0000000..aff6ae5 --- /dev/null +++ b/packages/backend/src/graphql/resolvers/users.ts @@ -0,0 +1,72 @@ +import { GraphQLError } from "graphql"; +import { builder } from "../builder.js"; +import { CurrentUserType, UserType } from "../types/index.js"; +import { users } from "../../db/schema/index.js"; +import { eq } from "drizzle-orm"; + +builder.queryField("me", (t) => + t.field({ + type: CurrentUserType, + nullable: true, + resolve: async (_root, _args, ctx) => { + if (!ctx.currentUser) return null; + const user = await ctx.db + .select() + .from(users) + .where(eq(users.id, ctx.currentUser.id)) + .limit(1); + return user[0] || null; + }, + }), +); + +builder.queryField("userProfile", (t) => + t.field({ + type: UserType, + nullable: true, + args: { + id: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const user = await ctx.db + .select() + .from(users) + .where(eq(users.id, args.id)) + .limit(1); + return user[0] || null; + }, + }), +); + 
+builder.mutationField("updateProfile", (t) => + t.field({ + type: CurrentUserType, + nullable: true, + args: { + firstName: t.arg.string(), + lastName: t.arg.string(), + artistName: t.arg.string(), + description: t.arg.string(), + tags: t.arg.stringList(), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const updates: Record = { date_updated: new Date() }; + if (args.firstName !== undefined && args.firstName !== null) updates.first_name = args.firstName; + if (args.lastName !== undefined && args.lastName !== null) updates.last_name = args.lastName; + if (args.artistName !== undefined && args.artistName !== null) updates.artist_name = args.artistName; + if (args.description !== undefined && args.description !== null) updates.description = args.description; + if (args.tags !== undefined && args.tags !== null) updates.tags = args.tags; + + await ctx.db.update(users).set(updates as any).where(eq(users.id, ctx.currentUser.id)); + + const updated = await ctx.db + .select() + .from(users) + .where(eq(users.id, ctx.currentUser.id)) + .limit(1); + return updated[0] || null; + }, + }), +); diff --git a/packages/backend/src/graphql/resolvers/videos.ts b/packages/backend/src/graphql/resolvers/videos.ts new file mode 100644 index 0000000..cbead35 --- /dev/null +++ b/packages/backend/src/graphql/resolvers/videos.ts @@ -0,0 +1,320 @@ +import { GraphQLError } from "graphql"; +import { builder } from "../builder.js"; +import { VideoType, VideoLikeResponseType, VideoPlayResponseType, VideoLikeStatusType } from "../types/index.js"; +import { videos, video_models, video_likes, video_plays, users, files } from "../../db/schema/index.js"; +import { eq, and, lte, desc, inArray, count } from "drizzle-orm"; + +async function enrichVideo(db: any, video: any) { + // Fetch models + const modelRows = await db + .select({ + id: users.id, + artist_name: users.artist_name, + slug: users.slug, + avatar: users.avatar, + }) + 
.from(video_models) + .leftJoin(users, eq(video_models.user_id, users.id)) + .where(eq(video_models.video_id, video.id)); + + // Fetch movie file + let movieFile = null; + if (video.movie) { + const mf = await db.select().from(files).where(eq(files.id, video.movie)).limit(1); + movieFile = mf[0] || null; + } + + // Count likes + const likesCount = await db.select({ count: count() }).from(video_likes).where(eq(video_likes.video_id, video.id)); + const playsCount = await db.select({ count: count() }).from(video_plays).where(eq(video_plays.video_id, video.id)); + + return { + ...video, + models: modelRows, + movie_file: movieFile, + likes_count: likesCount[0]?.count || 0, + plays_count: playsCount[0]?.count || 0, + }; +} + +builder.queryField("videos", (t) => + t.field({ + type: [VideoType], + args: { + modelId: t.arg.string(), + featured: t.arg.boolean(), + limit: t.arg.int(), + }, + resolve: async (_root, args, ctx) => { + let query = ctx.db + .select({ v: videos }) + .from(videos) + .where(lte(videos.upload_date, new Date())) + .orderBy(desc(videos.upload_date)); + + if (args.modelId) { + const videoIds = await ctx.db + .select({ video_id: video_models.video_id }) + .from(video_models) + .where(eq(video_models.user_id, args.modelId)); + + if (videoIds.length === 0) return []; + + query = ctx.db + .select({ v: videos }) + .from(videos) + .where(and( + lte(videos.upload_date, new Date()), + inArray(videos.id, videoIds.map((v: any) => v.video_id)), + )) + .orderBy(desc(videos.upload_date)); + } + + if (args.featured !== null && args.featured !== undefined) { + query = ctx.db + .select({ v: videos }) + .from(videos) + .where(and( + lte(videos.upload_date, new Date()), + eq(videos.featured, args.featured), + )) + .orderBy(desc(videos.upload_date)); + } + + if (args.limit) { + query = (query as any).limit(args.limit); + } + + const rows = await query; + const videoList = rows.map((r: any) => r.v || r); + return Promise.all(videoList.map((v: any) => enrichVideo(ctx.db, 
v))); + }, + }), +); + +builder.queryField("video", (t) => + t.field({ + type: VideoType, + nullable: true, + args: { + slug: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + const video = await ctx.db + .select() + .from(videos) + .where(and(eq(videos.slug, args.slug), lte(videos.upload_date, new Date()))) + .limit(1); + + if (!video[0]) return null; + return enrichVideo(ctx.db, video[0]); + }, + }), +); + +builder.queryField("videoLikeStatus", (t) => + t.field({ + type: VideoLikeStatusType, + args: { + videoId: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) return { liked: false }; + const existing = await ctx.db + .select() + .from(video_likes) + .where(and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id))) + .limit(1); + return { liked: existing.length > 0 }; + }, + }), +); + +builder.mutationField("likeVideo", (t) => + t.field({ + type: VideoLikeResponseType, + args: { + videoId: t.arg.string({ required: true }), + }, + resolve: async (_root, args, ctx) => { + if (!ctx.currentUser) throw new GraphQLError("Unauthorized"); + + const existing = await ctx.db + .select() + .from(video_likes) + .where(and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id))) + .limit(1); + + if (existing.length > 0) throw new GraphQLError("Already liked"); + + await ctx.db.insert(video_likes).values({ + video_id: args.videoId, + user_id: ctx.currentUser.id, + }); + + await ctx.db + .update(videos) + .set({ likes_count: (await ctx.db.select({ c: videos.likes_count }).from(videos).where(eq(videos.id, args.videoId)).limit(1))[0]?.c as number + 1 || 1 }) + .where(eq(videos.id, args.videoId)); + + const likesCount = await ctx.db.select({ count: count() }).from(video_likes).where(eq(video_likes.video_id, args.videoId)); + return { liked: true, likes_count: likesCount[0]?.count || 1 }; + }, + }), +); + 
builder.mutationField("unlikeVideo", (t) =>
  t.field({
    type: VideoLikeResponseType,
    args: {
      videoId: t.arg.string({ required: true }),
    },
    resolve: async (_root, args, ctx) => {
      if (!ctx.currentUser) throw new GraphQLError("Unauthorized");

      // Must currently be liked by this user.
      const existing = await ctx.db
        .select()
        .from(video_likes)
        .where(and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)))
        .limit(1);

      if (existing.length === 0) throw new GraphQLError("Not liked");

      await ctx.db
        .delete(video_likes)
        .where(and(eq(video_likes.video_id, args.videoId), eq(video_likes.user_id, ctx.currentUser.id)));

      // FIX: recompute the denormalized counter from video_likes instead of
      // the original racy read-decrement-write (`Math.max((read ?? 1) - 1, 0)`),
      // mirroring how recordVideoPlay maintains plays_count.
      const likesCount = await ctx.db
        .select({ count: count() })
        .from(video_likes)
        .where(eq(video_likes.video_id, args.videoId));
      const total = likesCount[0]?.count || 0;

      await ctx.db
        .update(videos)
        .set({ likes_count: total })
        .where(eq(videos.id, args.videoId));

      return { liked: false, likes_count: total };
    },
  }),
);

builder.mutationField("recordVideoPlay", (t) =>
  t.field({
    type: VideoPlayResponseType,
    args: {
      videoId: t.arg.string({ required: true }),
      sessionId: t.arg.string(),
    },
    resolve: async (_root, args, ctx) => {
      // Plays may be anonymous: user_id is nullable, sessionId is optional.
      const play = await ctx.db.insert(video_plays).values({
        video_id: args.videoId,
        user_id: ctx.currentUser?.id || null,
        session_id: args.sessionId || null,
      }).returning({ id: video_plays.id });

      // Keep the denormalized plays_count in sync from the join table.
      const playsCount = await ctx.db.select({ count: count() }).from(video_plays).where(eq(video_plays.video_id, args.videoId));

      await ctx.db
        .update(videos)
        .set({ plays_count: playsCount[0]?.count || 0 })
        .where(eq(videos.id, args.videoId));

      return {
        success: true,
        play_id: play[0].id,
        plays_count: playsCount[0]?.count || 0,
      };
    },
  }),
);

builder.mutationField("updateVideoPlay", (t) =>
  t.field({
    type: "Boolean",
    args: {
      videoId:
t.arg.string({ required: true }), + playId: t.arg.string({ required: true }), + durationWatched: t.arg.int({ required: true }), + completed: t.arg.boolean({ required: true }), + }, + resolve: async (_root, args, ctx) => { + await ctx.db + .update(video_plays) + .set({ duration_watched: args.durationWatched, completed: args.completed, date_updated: new Date() }) + .where(eq(video_plays.id, args.playId)); + return true; + }, + }), +); + +builder.queryField("analytics", (t) => + t.field({ + type: "JSON", + nullable: true, + resolve: async (_root, _args, ctx) => { + if (!ctx.currentUser || ctx.currentUser.role !== "model") { + throw new GraphQLError("Unauthorized"); + } + + const userId = ctx.currentUser.id; + + // Get all videos by this user (via video_models) + const modelVideoIds = await ctx.db + .select({ video_id: video_models.video_id }) + .from(video_models) + .where(eq(video_models.user_id, userId)); + + if (modelVideoIds.length === 0) { + return { total_videos: 0, total_likes: 0, total_plays: 0, plays_by_date: {}, likes_by_date: {}, videos: [] }; + } + + const videoIds = modelVideoIds.map((v: any) => v.video_id); + const videoList = await ctx.db.select().from(videos).where(inArray(videos.id, videoIds)); + const plays = await ctx.db.select().from(video_plays).where(inArray(video_plays.video_id, videoIds)); + const likes = await ctx.db.select().from(video_likes).where(inArray(video_likes.video_id, videoIds)); + + const totalLikes = videoList.reduce((sum, v) => sum + (v.likes_count || 0), 0); + const totalPlays = videoList.reduce((sum, v) => sum + (v.plays_count || 0), 0); + + const playsByDate = plays.reduce((acc: any, play) => { + const date = new Date(play.date_created).toISOString().split("T")[0]; + if (!acc[date]) acc[date] = 0; + acc[date]++; + return acc; + }, {}); + + const likesByDate = likes.reduce((acc: any, like) => { + const date = new Date(like.date_created).toISOString().split("T")[0]; + if (!acc[date]) acc[date] = 0; + acc[date]++; + return acc; + 
}, {}); + + const videoAnalytics = videoList.map((video) => { + const vPlays = plays.filter((p) => p.video_id === video.id); + const completedPlays = vPlays.filter((p) => p.completed).length; + const avgWatchTime = vPlays.length > 0 + ? vPlays.reduce((sum, p) => sum + (p.duration_watched || 0), 0) / vPlays.length + : 0; + + return { + id: video.id, + title: video.title, + slug: video.slug, + upload_date: video.upload_date, + likes: video.likes_count || 0, + plays: video.plays_count || 0, + completed_plays: completedPlays, + completion_rate: video.plays_count ? (completedPlays / video.plays_count) * 100 : 0, + avg_watch_time: Math.round(avgWatchTime), + }; + }); + + return { + total_videos: videoList.length, + total_likes: totalLikes, + total_plays: totalPlays, + plays_by_date: playsByDate, + likes_by_date: likesByDate, + videos: videoAnalytics, + }; + }, + }), +); diff --git a/packages/backend/src/graphql/types/index.ts b/packages/backend/src/graphql/types/index.ts new file mode 100644 index 0000000..a324b74 --- /dev/null +++ b/packages/backend/src/graphql/types/index.ts @@ -0,0 +1,545 @@ +import { builder } from "../builder.js"; + +// File type +export const FileType = builder.objectRef<{ + id: string; + title: string | null; + description: string | null; + filename: string; + mime_type: string | null; + filesize: number | null; + duration: number | null; + uploaded_by: string | null; + date_created: Date; +}>("File").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + title: t.exposeString("title", { nullable: true }), + description: t.exposeString("description", { nullable: true }), + filename: t.exposeString("filename"), + mime_type: t.exposeString("mime_type", { nullable: true }), + filesize: t.exposeFloat("filesize", { nullable: true }), + duration: t.exposeInt("duration", { nullable: true }), + uploaded_by: t.exposeString("uploaded_by", { nullable: true }), + date_created: t.expose("date_created", { type: "DateTime" }), + }), +}); + +// User type 
+export const UserType = builder.objectRef<{ + id: string; + email: string; + first_name: string | null; + last_name: string | null; + artist_name: string | null; + slug: string | null; + description: string | null; + tags: string[] | null; + role: "model" | "viewer" | "admin"; + avatar: string | null; + banner: string | null; + email_verified: boolean; + date_created: Date; +}>("User").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + email: t.exposeString("email"), + first_name: t.exposeString("first_name", { nullable: true }), + last_name: t.exposeString("last_name", { nullable: true }), + artist_name: t.exposeString("artist_name", { nullable: true }), + slug: t.exposeString("slug", { nullable: true }), + description: t.exposeString("description", { nullable: true }), + tags: t.exposeStringList("tags", { nullable: true }), + role: t.exposeString("role"), + avatar: t.exposeString("avatar", { nullable: true }), + banner: t.exposeString("banner", { nullable: true }), + email_verified: t.exposeBoolean("email_verified"), + date_created: t.expose("date_created", { type: "DateTime" }), + }), +}); + +// CurrentUser type (same shape, used for auth context) +export const CurrentUserType = builder.objectRef<{ + id: string; + email: string; + first_name: string | null; + last_name: string | null; + artist_name: string | null; + slug: string | null; + description: string | null; + tags: string[] | null; + role: "model" | "viewer" | "admin"; + avatar: string | null; + banner: string | null; + email_verified: boolean; + date_created: Date; +}>("CurrentUser").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + email: t.exposeString("email"), + first_name: t.exposeString("first_name", { nullable: true }), + last_name: t.exposeString("last_name", { nullable: true }), + artist_name: t.exposeString("artist_name", { nullable: true }), + slug: t.exposeString("slug", { nullable: true }), + description: t.exposeString("description", { nullable: true }), + tags: 
t.exposeStringList("tags", { nullable: true }), + role: t.exposeString("role"), + avatar: t.exposeString("avatar", { nullable: true }), + banner: t.exposeString("banner", { nullable: true }), + email_verified: t.exposeBoolean("email_verified"), + date_created: t.expose("date_created", { type: "DateTime" }), + }), +}); + +// Video type +export const VideoType = builder.objectRef<{ + id: string; + slug: string; + title: string; + description: string | null; + image: string | null; + movie: string | null; + tags: string[] | null; + upload_date: Date; + premium: boolean | null; + featured: boolean | null; + likes_count: number | null; + plays_count: number | null; + models?: { id: string; artist_name: string | null; slug: string | null; avatar: string | null }[]; + movie_file?: { id: string; filename: string; mime_type: string | null; duration: number | null } | null; +}>("Video").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + slug: t.exposeString("slug"), + title: t.exposeString("title"), + description: t.exposeString("description", { nullable: true }), + image: t.exposeString("image", { nullable: true }), + movie: t.exposeString("movie", { nullable: true }), + tags: t.exposeStringList("tags", { nullable: true }), + upload_date: t.expose("upload_date", { type: "DateTime" }), + premium: t.exposeBoolean("premium", { nullable: true }), + featured: t.exposeBoolean("featured", { nullable: true }), + likes_count: t.exposeInt("likes_count", { nullable: true }), + plays_count: t.exposeInt("plays_count", { nullable: true }), + models: t.expose("models", { type: [VideoModelType], nullable: true }), + movie_file: t.expose("movie_file", { type: VideoFileType, nullable: true }), + }), +}); + +export const VideoModelType = builder.objectRef<{ + id: string; + artist_name: string | null; + slug: string | null; + avatar: string | null; +}>("VideoModel").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + artist_name: t.exposeString("artist_name", { nullable: 
true }), + slug: t.exposeString("slug", { nullable: true }), + avatar: t.exposeString("avatar", { nullable: true }), + }), +}); + +export const VideoFileType = builder.objectRef<{ + id: string; + filename: string; + mime_type: string | null; + duration: number | null; +}>("VideoFile").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + filename: t.exposeString("filename"), + mime_type: t.exposeString("mime_type", { nullable: true }), + duration: t.exposeInt("duration", { nullable: true }), + }), +}); + +// Model type (model profile, enriched user) +export const ModelType = builder.objectRef<{ + id: string; + slug: string | null; + artist_name: string | null; + description: string | null; + avatar: string | null; + banner: string | null; + tags: string[] | null; + date_created: Date; + photos?: { id: string; filename: string }[]; +}>("Model").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + slug: t.exposeString("slug", { nullable: true }), + artist_name: t.exposeString("artist_name", { nullable: true }), + description: t.exposeString("description", { nullable: true }), + avatar: t.exposeString("avatar", { nullable: true }), + banner: t.exposeString("banner", { nullable: true }), + tags: t.exposeStringList("tags", { nullable: true }), + date_created: t.expose("date_created", { type: "DateTime" }), + photos: t.expose("photos", { type: [ModelPhotoType], nullable: true }), + }), +}); + +export const ModelPhotoType = builder.objectRef<{ + id: string; + filename: string; +}>("ModelPhoto").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + filename: t.exposeString("filename"), + }), +}); + +// Article type +export const ArticleType = builder.objectRef<{ + id: string; + slug: string; + title: string; + excerpt: string | null; + content: string | null; + image: string | null; + tags: string[] | null; + publish_date: Date; + category: string | null; + featured: boolean | null; + author?: { first_name: string | null; last_name: string | null; 
avatar: string | null; description: string | null } | null; +}>("Article").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + slug: t.exposeString("slug"), + title: t.exposeString("title"), + excerpt: t.exposeString("excerpt", { nullable: true }), + content: t.exposeString("content", { nullable: true }), + image: t.exposeString("image", { nullable: true }), + tags: t.exposeStringList("tags", { nullable: true }), + publish_date: t.expose("publish_date", { type: "DateTime" }), + category: t.exposeString("category", { nullable: true }), + featured: t.exposeBoolean("featured", { nullable: true }), + author: t.expose("author", { type: ArticleAuthorType, nullable: true }), + }), +}); + +export const ArticleAuthorType = builder.objectRef<{ + first_name: string | null; + last_name: string | null; + avatar: string | null; + description: string | null; +}>("ArticleAuthor").implement({ + fields: (t) => ({ + first_name: t.exposeString("first_name", { nullable: true }), + last_name: t.exposeString("last_name", { nullable: true }), + avatar: t.exposeString("avatar", { nullable: true }), + description: t.exposeString("description", { nullable: true }), + }), +}); + +// Recording type +export const RecordingType = builder.objectRef<{ + id: string; + title: string; + description: string | null; + slug: string; + duration: number; + events: object[] | null; + device_info: object[] | null; + user_id: string; + status: string; + tags: string[] | null; + linked_video: string | null; + featured: boolean | null; + public: boolean | null; + date_created: Date; + date_updated: Date | null; +}>("Recording").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + title: t.exposeString("title"), + description: t.exposeString("description", { nullable: true }), + slug: t.exposeString("slug"), + duration: t.exposeInt("duration"), + events: t.expose("events", { type: "JSON", nullable: true }), + device_info: t.expose("device_info", { type: "JSON", nullable: true }), + user_id: 
t.exposeString("user_id"), + status: t.exposeString("status"), + tags: t.exposeStringList("tags", { nullable: true }), + linked_video: t.exposeString("linked_video", { nullable: true }), + featured: t.exposeBoolean("featured", { nullable: true }), + public: t.exposeBoolean("public", { nullable: true }), + date_created: t.expose("date_created", { type: "DateTime" }), + date_updated: t.expose("date_updated", { type: "DateTime", nullable: true }), + }), +}); + +// Comment type +export const CommentType = builder.objectRef<{ + id: number; + collection: string; + item_id: string; + comment: string; + user_id: string; + date_created: Date; + user?: { id: string; first_name: string | null; last_name: string | null; avatar: string | null } | null; +}>("Comment").implement({ + fields: (t) => ({ + id: t.exposeInt("id"), + collection: t.exposeString("collection"), + item_id: t.exposeString("item_id"), + comment: t.exposeString("comment"), + user_id: t.exposeString("user_id"), + date_created: t.expose("date_created", { type: "DateTime" }), + user: t.expose("user", { type: CommentUserType, nullable: true }), + }), +}); + +export const CommentUserType = builder.objectRef<{ + id: string; + first_name: string | null; + last_name: string | null; + avatar: string | null; +}>("CommentUser").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + first_name: t.exposeString("first_name", { nullable: true }), + last_name: t.exposeString("last_name", { nullable: true }), + avatar: t.exposeString("avatar", { nullable: true }), + }), +}); + +// Stats type +export const StatsType = builder.objectRef<{ + videos_count: number; + models_count: number; + viewers_count: number; +}>("Stats").implement({ + fields: (t) => ({ + videos_count: t.exposeInt("videos_count"), + models_count: t.exposeInt("models_count"), + viewers_count: t.exposeInt("viewers_count"), + }), +}); + +// Gamification types +export const LeaderboardEntryType = builder.objectRef<{ + user_id: string; + display_name: string 
| null; + avatar: string | null; + total_weighted_points: number | null; + total_raw_points: number | null; + recordings_count: number | null; + playbacks_count: number | null; + achievements_count: number | null; + rank: number; +}>("LeaderboardEntry").implement({ + fields: (t) => ({ + user_id: t.exposeString("user_id"), + display_name: t.exposeString("display_name", { nullable: true }), + avatar: t.exposeString("avatar", { nullable: true }), + total_weighted_points: t.exposeFloat("total_weighted_points", { nullable: true }), + total_raw_points: t.exposeInt("total_raw_points", { nullable: true }), + recordings_count: t.exposeInt("recordings_count", { nullable: true }), + playbacks_count: t.exposeInt("playbacks_count", { nullable: true }), + achievements_count: t.exposeInt("achievements_count", { nullable: true }), + rank: t.exposeInt("rank"), + }), +}); + +export const AchievementType = builder.objectRef<{ + id: string; + code: string; + name: string; + description: string | null; + icon: string | null; + category: string | null; + required_count: number; + points_reward: number; +}>("Achievement").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + code: t.exposeString("code"), + name: t.exposeString("name"), + description: t.exposeString("description", { nullable: true }), + icon: t.exposeString("icon", { nullable: true }), + category: t.exposeString("category", { nullable: true }), + required_count: t.exposeInt("required_count"), + points_reward: t.exposeInt("points_reward"), + }), +}); + +export const UserGamificationType = builder.objectRef<{ + stats: { + user_id: string; + total_raw_points: number | null; + total_weighted_points: number | null; + recordings_count: number | null; + playbacks_count: number | null; + comments_count: number | null; + achievements_count: number | null; + rank: number; + } | null; + achievements: { + id: string; + code: string; + name: string; + description: string | null; + icon: string | null; + category: string | 
null; + date_unlocked: Date; + progress: number | null; + required_count: number; + }[]; + recent_points: { + action: string; + points: number; + date_created: Date; + recording_id: string | null; + }[]; +}>("UserGamification").implement({ + fields: (t) => ({ + stats: t.expose("stats", { type: UserStatsType, nullable: true }), + achievements: t.expose("achievements", { type: [UserAchievementType] }), + recent_points: t.expose("recent_points", { type: [RecentPointType] }), + }), +}); + +export const UserStatsType = builder.objectRef<{ + user_id: string; + total_raw_points: number | null; + total_weighted_points: number | null; + recordings_count: number | null; + playbacks_count: number | null; + comments_count: number | null; + achievements_count: number | null; + rank: number; +}>("UserStats").implement({ + fields: (t) => ({ + user_id: t.exposeString("user_id"), + total_raw_points: t.exposeInt("total_raw_points", { nullable: true }), + total_weighted_points: t.exposeFloat("total_weighted_points", { nullable: true }), + recordings_count: t.exposeInt("recordings_count", { nullable: true }), + playbacks_count: t.exposeInt("playbacks_count", { nullable: true }), + comments_count: t.exposeInt("comments_count", { nullable: true }), + achievements_count: t.exposeInt("achievements_count", { nullable: true }), + rank: t.exposeInt("rank"), + }), +}); + +export const UserAchievementType = builder.objectRef<{ + id: string; + code: string; + name: string; + description: string | null; + icon: string | null; + category: string | null; + date_unlocked: Date; + progress: number | null; + required_count: number; +}>("UserAchievement").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + code: t.exposeString("code"), + name: t.exposeString("name"), + description: t.exposeString("description", { nullable: true }), + icon: t.exposeString("icon", { nullable: true }), + category: t.exposeString("category", { nullable: true }), + date_unlocked: t.expose("date_unlocked", { type: 
"DateTime" }), + progress: t.exposeInt("progress", { nullable: true }), + required_count: t.exposeInt("required_count"), + }), +}); + +export const RecentPointType = builder.objectRef<{ + action: string; + points: number; + date_created: Date; + recording_id: string | null; +}>("RecentPoint").implement({ + fields: (t) => ({ + action: t.exposeString("action"), + points: t.exposeInt("points"), + date_created: t.expose("date_created", { type: "DateTime" }), + recording_id: t.exposeString("recording_id", { nullable: true }), + }), +}); + +// Analytics types +export const AnalyticsType = builder.objectRef<{ + total_videos: number; + total_likes: number; + total_plays: number; + plays_by_date: Record; + likes_by_date: Record; + videos: { + id: string; + title: string; + slug: string; + upload_date: Date; + likes: number; + plays: number; + completed_plays: number; + completion_rate: number; + avg_watch_time: number; + }[]; +}>("Analytics").implement({ + fields: (t) => ({ + total_videos: t.exposeInt("total_videos"), + total_likes: t.exposeInt("total_likes"), + total_plays: t.exposeInt("total_plays"), + plays_by_date: t.expose("plays_by_date", { type: "JSON" }), + likes_by_date: t.expose("likes_by_date", { type: "JSON" }), + videos: t.expose("videos", { type: [VideoAnalyticsType] }), + }), +}); + +export const VideoAnalyticsType = builder.objectRef<{ + id: string; + title: string; + slug: string; + upload_date: Date; + likes: number; + plays: number; + completed_plays: number; + completion_rate: number; + avg_watch_time: number; +}>("VideoAnalytics").implement({ + fields: (t) => ({ + id: t.exposeString("id"), + title: t.exposeString("title"), + slug: t.exposeString("slug"), + upload_date: t.expose("upload_date", { type: "DateTime" }), + likes: t.exposeInt("likes"), + plays: t.exposeInt("plays"), + completed_plays: t.exposeInt("completed_plays"), + completion_rate: t.exposeFloat("completion_rate"), + avg_watch_time: t.exposeInt("avg_watch_time"), + }), +}); + +// Response 
types +export const VideoLikeResponseType = builder.objectRef<{ + liked: boolean; + likes_count: number; +}>("VideoLikeResponse").implement({ + fields: (t) => ({ + liked: t.exposeBoolean("liked"), + likes_count: t.exposeInt("likes_count"), + }), +}); + +export const VideoPlayResponseType = builder.objectRef<{ + success: boolean; + play_id: string; + plays_count: number; +}>("VideoPlayResponse").implement({ + fields: (t) => ({ + success: t.exposeBoolean("success"), + play_id: t.exposeString("play_id"), + plays_count: t.exposeInt("plays_count"), + }), +}); + +export const VideoLikeStatusType = builder.objectRef<{ + liked: boolean; +}>("VideoLikeStatus").implement({ + fields: (t) => ({ + liked: t.exposeBoolean("liked"), + }), +}); diff --git a/packages/backend/src/index.ts b/packages/backend/src/index.ts new file mode 100644 index 0000000..ce20f8e --- /dev/null +++ b/packages/backend/src/index.ts @@ -0,0 +1,87 @@ +import Fastify from "fastify"; +import fastifyCookie from "@fastify/cookie"; +import fastifyCors from "@fastify/cors"; +import fastifyMultipart from "@fastify/multipart"; +import fastifyStatic from "@fastify/static"; +import { createYoga } from "graphql-yoga"; +import path from "path"; +import { schema } from "./graphql/index.js"; +import { buildContext } from "./graphql/context.js"; +import { db } from "./db/connection.js"; +import { redis } from "./lib/auth.js"; + +const PORT = parseInt(process.env.PORT || "4000"); +const UPLOAD_DIR = process.env.UPLOAD_DIR || "/data/uploads"; +const CORS_ORIGIN = process.env.CORS_ORIGIN || "http://localhost:3000"; + +const fastify = Fastify({ + logger: { + level: process.env.LOG_LEVEL || "info", + }, +}); + +await fastify.register(fastifyCookie, { + secret: process.env.COOKIE_SECRET || "change-me-in-production", +}); + +await fastify.register(fastifyCors, { + origin: CORS_ORIGIN, + credentials: true, + methods: ["GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"], +}); + +await fastify.register(fastifyMultipart, { + 
limits: { + fileSize: 5 * 1024 * 1024 * 1024, // 5 GB + }, +}); + +await fastify.register(fastifyStatic, { + root: path.resolve(UPLOAD_DIR), + prefix: "/assets/", + decorateReply: false, +}); + +const yoga = createYoga({ + schema, + context: buildContext, + graphqlEndpoint: "/graphql", + healthCheckEndpoint: "/health", + logging: { + debug: (...args) => fastify.log.debug(...args), + info: (...args) => fastify.log.info(...args), + warn: (...args) => fastify.log.warn(...args), + error: (...args) => fastify.log.error(...args), + }, +}); + +fastify.route({ + url: "/graphql", + method: ["GET", "POST", "OPTIONS"], + handler: async (request, reply) => { + const response = await yoga.handleNodeRequestAndResponse(request, reply, { + request, + reply, + db, + redis, + }); + reply.status(response.status); + for (const [key, value] of response.headers.entries()) { + reply.header(key, value); + } + return reply.send(response.body); + }, +}); + +fastify.get("/health", async (_request, reply) => { + return reply.send({ status: "ok", timestamp: new Date().toISOString() }); +}); + +try { + await fastify.listen({ port: PORT, host: "0.0.0.0" }); + fastify.log.info(`Backend running at http://0.0.0.0:${PORT}`); + fastify.log.info(`GraphQL at http://0.0.0.0:${PORT}/graphql`); +} catch (err) { + fastify.log.error(err); + process.exit(1); +} diff --git a/packages/backend/src/lib/argon.ts b/packages/backend/src/lib/argon.ts new file mode 100644 index 0000000..422b71a --- /dev/null +++ b/packages/backend/src/lib/argon.ts @@ -0,0 +1,9 @@ +import argon2 from "argon2"; + +export async function hash(password: string): Promise { + return argon2.hash(password); +} + +export async function verify(hash: string, password: string): Promise { + return argon2.verify(hash, password); +} diff --git a/packages/backend/src/lib/auth.ts b/packages/backend/src/lib/auth.ts new file mode 100644 index 0000000..2782896 --- /dev/null +++ b/packages/backend/src/lib/auth.ts @@ -0,0 +1,28 @@ +import Redis from 
"ioredis"; + +export type SessionUser = { + id: string; + email: string; + role: "model" | "viewer" | "admin"; + first_name: string | null; + last_name: string | null; + artist_name: string | null; + slug: string | null; + avatar: string | null; +}; + +export const redis = new Redis(process.env.REDIS_URL || "redis://localhost:6379"); + +export async function setSession(token: string, user: SessionUser): Promise { + await redis.set(`session:${token}`, JSON.stringify(user), "EX", 86400); +} + +export async function getSession(token: string): Promise { + const data = await redis.get(`session:${token}`); + if (!data) return null; + return JSON.parse(data) as SessionUser; +} + +export async function deleteSession(token: string): Promise { + await redis.del(`session:${token}`); +} diff --git a/packages/backend/src/lib/email.ts b/packages/backend/src/lib/email.ts new file mode 100644 index 0000000..30e6df2 --- /dev/null +++ b/packages/backend/src/lib/email.ts @@ -0,0 +1,32 @@ +import nodemailer from "nodemailer"; + +const transporter = nodemailer.createTransport({ + host: process.env.SMTP_HOST || "localhost", + port: parseInt(process.env.SMTP_PORT || "587"), + secure: process.env.SMTP_SECURE === "true", + auth: process.env.SMTP_USER ? { + user: process.env.SMTP_USER, + pass: process.env.SMTP_PASS, + } : undefined, +}); + +const FROM = process.env.EMAIL_FROM || "noreply@sexy.pivoine.art"; +const BASE_URL = process.env.PUBLIC_URL || "http://localhost:3000"; + +export async function sendVerification(email: string, token: string): Promise { + await transporter.sendMail({ + from: FROM, + to: email, + subject: "Verify your email", + html: `
      <p><a href="${BASE_URL}/verify-email?token=${token}">Click here</a> to verify your email.</p>
`,
  });
}

export async function sendPasswordReset(email: string, token: string): Promise<void> {
  await transporter.sendMail({
    from: FROM,
    to: email,
    subject: "Reset your password",
    html: `
      <p><a href="${BASE_URL}/reset-password?token=${token}">Click here</a> to reset your password.</p>
`, + }); +} diff --git a/packages/backend/src/lib/ffmpeg.ts b/packages/backend/src/lib/ffmpeg.ts new file mode 100644 index 0000000..1511efb --- /dev/null +++ b/packages/backend/src/lib/ffmpeg.ts @@ -0,0 +1,10 @@ +import ffmpeg from "fluent-ffmpeg"; + +export function extractDuration(filePath: string): Promise { + return new Promise((resolve, reject) => { + ffmpeg.ffprobe(filePath, (err, metadata) => { + if (err) return reject(err); + resolve(Math.round(metadata.format.duration || 0)); + }); + }); +} diff --git a/packages/backend/src/lib/gamification.ts b/packages/backend/src/lib/gamification.ts new file mode 100644 index 0000000..f8c6c56 --- /dev/null +++ b/packages/backend/src/lib/gamification.ts @@ -0,0 +1,324 @@ +import { eq, sql, and, gt, isNotNull, count, sum } from "drizzle-orm"; +import type { DB } from "../db/connection.js"; +import { + user_points, + user_stats, + recordings, + recording_plays, + comments, + user_achievements, + achievements, + users, +} from "../db/schema/index.js"; + +export const POINT_VALUES = { + RECORDING_CREATE: 50, + RECORDING_PLAY: 10, + RECORDING_COMPLETE: 5, + COMMENT_CREATE: 5, + RECORDING_FEATURED: 100, +} as const; + +const DECAY_LAMBDA = 0.005; + +export async function awardPoints( + db: DB, + userId: string, + action: keyof typeof POINT_VALUES, + recordingId?: string, +): Promise { + const points = POINT_VALUES[action]; + await db.insert(user_points).values({ + user_id: userId, + action, + points, + recording_id: recordingId || null, + date_created: new Date(), + }); + await updateUserStats(db, userId); +} + +export async function calculateWeightedScore(db: DB, userId: string): Promise { + const now = new Date(); + const result = await db.execute(sql` + SELECT SUM( + points * EXP(-${DECAY_LAMBDA} * EXTRACT(EPOCH FROM (${now}::timestamptz - date_created)) / 86400) + ) as weighted_score + FROM user_points + WHERE user_id = ${userId} + `); + return parseFloat((result.rows[0] as any)?.weighted_score || "0"); +} + +export async 
function updateUserStats(db: DB, userId: string): Promise { + const now = new Date(); + + const rawPointsResult = await db + .select({ total: sum(user_points.points) }) + .from(user_points) + .where(eq(user_points.user_id, userId)); + const totalRawPoints = parseInt(String(rawPointsResult[0]?.total || "0")); + + const totalWeightedPoints = await calculateWeightedScore(db, userId); + + const recordingsResult = await db + .select({ count: count() }) + .from(recordings) + .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published"))); + const recordingsCount = recordingsResult[0]?.count || 0; + + // Get playbacks count (excluding own recordings) + const ownRecordingIds = await db + .select({ id: recordings.id }) + .from(recordings) + .where(eq(recordings.user_id, userId)); + const ownIds = ownRecordingIds.map((r) => r.id); + + let playbacksCount = 0; + if (ownIds.length > 0) { + const playbacksResult = await db.execute(sql` + SELECT COUNT(*) as count FROM recording_plays + WHERE user_id = ${userId} + AND recording_id NOT IN (${sql.join(ownIds.map(id => sql`${id}`), sql`, `)}) + `); + playbacksCount = parseInt((playbacksResult.rows[0] as any)?.count || "0"); + } else { + const playbacksResult = await db + .select({ count: count() }) + .from(recording_plays) + .where(eq(recording_plays.user_id, userId)); + playbacksCount = playbacksResult[0]?.count || 0; + } + + const commentsResult = await db + .select({ count: count() }) + .from(comments) + .where(and(eq(comments.user_id, userId), eq(comments.collection, "recordings"))); + const commentsCount = commentsResult[0]?.count || 0; + + const achievementsResult = await db + .select({ count: count() }) + .from(user_achievements) + .where(and(eq(user_achievements.user_id, userId), isNotNull(user_achievements.date_unlocked))); + const achievementsCount = achievementsResult[0]?.count || 0; + + const existing = await db + .select() + .from(user_stats) + .where(eq(user_stats.user_id, userId)) + .limit(1); + + if 
(existing.length > 0) { + await db + .update(user_stats) + .set({ + total_raw_points: totalRawPoints, + total_weighted_points: totalWeightedPoints, + recordings_count: recordingsCount, + playbacks_count: playbacksCount, + comments_count: commentsCount, + achievements_count: achievementsCount, + last_updated: now, + }) + .where(eq(user_stats.user_id, userId)); + } else { + await db.insert(user_stats).values({ + user_id: userId, + total_raw_points: totalRawPoints, + total_weighted_points: totalWeightedPoints, + recordings_count: recordingsCount, + playbacks_count: playbacksCount, + comments_count: commentsCount, + achievements_count: achievementsCount, + last_updated: now, + }); + } +} + +export async function checkAchievements( + db: DB, + userId: string, + category?: string, +): Promise { + let achievementsQuery = db + .select() + .from(achievements) + .where(eq(achievements.status, "published")); + + if (category) { + achievementsQuery = db + .select() + .from(achievements) + .where(and(eq(achievements.status, "published"), eq(achievements.category, category))); + } + + const achievementsList = await achievementsQuery; + + for (const achievement of achievementsList) { + const progress = await getAchievementProgress(db, userId, achievement); + + const existing = await db + .select() + .from(user_achievements) + .where( + and( + eq(user_achievements.user_id, userId), + eq(user_achievements.achievement_id, achievement.id), + ), + ) + .limit(1); + + const isUnlocked = progress >= achievement.required_count; + const wasUnlocked = existing[0]?.date_unlocked !== null; + + if (existing.length > 0) { + await db + .update(user_achievements) + .set({ + progress, + date_unlocked: isUnlocked ? 
(existing[0].date_unlocked || new Date()) : null, + }) + .where( + and( + eq(user_achievements.user_id, userId), + eq(user_achievements.achievement_id, achievement.id), + ), + ); + } else { + await db.insert(user_achievements).values({ + user_id: userId, + achievement_id: achievement.id, + progress, + date_unlocked: isUnlocked ? new Date() : null, + }); + } + + if (isUnlocked && !wasUnlocked && achievement.points_reward > 0) { + await db.insert(user_points).values({ + user_id: userId, + action: `ACHIEVEMENT_${achievement.code}`, + points: achievement.points_reward, + recording_id: null, + date_created: new Date(), + }); + await updateUserStats(db, userId); + } + } +} + +async function getAchievementProgress( + db: DB, + userId: string, + achievement: typeof achievements.$inferSelect, +): Promise { + const { code } = achievement; + + if (["first_recording", "recording_10", "recording_50", "recording_100"].includes(code)) { + const result = await db + .select({ count: count() }) + .from(recordings) + .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published"))); + return result[0]?.count || 0; + } + + if (code === "featured_recording") { + const result = await db + .select({ count: count() }) + .from(recordings) + .where( + and( + eq(recordings.user_id, userId), + eq(recordings.status, "published"), + eq(recordings.featured, true), + ), + ); + return result[0]?.count || 0; + } + + if (["first_play", "play_100", "play_500"].includes(code)) { + const result = await db.execute(sql` + SELECT COUNT(*) as count + FROM recording_plays rp + LEFT JOIN recordings r ON rp.recording_id = r.id + WHERE rp.user_id = ${userId} + AND r.user_id != ${userId} + `); + return parseInt((result.rows[0] as any)?.count || "0"); + } + + if (["completionist_10", "completionist_100"].includes(code)) { + const result = await db + .select({ count: count() }) + .from(recording_plays) + .where(and(eq(recording_plays.user_id, userId), eq(recording_plays.completed, true))); + return 
result[0]?.count || 0; + } + + if (["first_comment", "comment_50", "comment_250"].includes(code)) { + const result = await db + .select({ count: count() }) + .from(comments) + .where(and(eq(comments.user_id, userId), eq(comments.collection, "recordings"))); + return result[0]?.count || 0; + } + + if (code === "early_adopter") { + const user = await db.select().from(users).where(eq(users.id, userId)).limit(1); + if (user[0]) { + const joinDate = new Date(user[0].date_created); + const platformLaunch = new Date("2025-01-01"); + const oneMonthAfterLaunch = new Date(platformLaunch); + oneMonthAfterLaunch.setMonth(oneMonthAfterLaunch.getMonth() + 1); + return joinDate <= oneMonthAfterLaunch ? 1 : 0; + } + } + + if (code === "one_year") { + const user = await db.select().from(users).where(eq(users.id, userId)).limit(1); + if (user[0]) { + const joinDate = new Date(user[0].date_created); + const oneYearAgo = new Date(); + oneYearAgo.setFullYear(oneYearAgo.getFullYear() - 1); + return joinDate <= oneYearAgo ? 1 : 0; + } + } + + if (code === "balanced_creator") { + const recordingsResult = await db + .select({ count: count() }) + .from(recordings) + .where(and(eq(recordings.user_id, userId), eq(recordings.status, "published"))); + const playsResult = await db.execute(sql` + SELECT COUNT(*) as count FROM recording_plays rp + LEFT JOIN recordings r ON rp.recording_id = r.id + WHERE rp.user_id = ${userId} AND r.user_id != ${userId} + `); + const rc = recordingsResult[0]?.count || 0; + const pc = parseInt((playsResult.rows[0] as any)?.count || "0"); + return rc >= 50 && pc >= 100 ? 
1 : 0; + } + + if (code === "top_10_rank") { + const userStat = await db + .select() + .from(user_stats) + .where(eq(user_stats.user_id, userId)) + .limit(1); + if (!userStat[0]) return 0; + const rankResult = await db + .select({ count: count() }) + .from(user_stats) + .where(gt(user_stats.total_weighted_points, userStat[0].total_weighted_points || 0)); + const userRank = (rankResult[0]?.count || 0) + 1; + return userRank <= 10 ? 1 : 0; + } + + return 0; +} + +export async function recalculateAllWeightedScores(db: DB): Promise { + const allUsers = await db.select({ user_id: user_stats.user_id }).from(user_stats); + for (const u of allUsers) { + await updateUserStats(db, u.user_id); + } +} diff --git a/packages/backend/src/lib/slugify.ts b/packages/backend/src/lib/slugify.ts new file mode 100644 index 0000000..5ccdb2c --- /dev/null +++ b/packages/backend/src/lib/slugify.ts @@ -0,0 +1,5 @@ +import slugifyLib from "slugify"; + +export function slugify(text: string): string { + return slugifyLib(text, { lower: true, strict: true }); +} diff --git a/packages/backend/src/scripts/data-migration.ts b/packages/backend/src/scripts/data-migration.ts new file mode 100644 index 0000000..beba5e2 --- /dev/null +++ b/packages/backend/src/scripts/data-migration.ts @@ -0,0 +1,566 @@ +/** + * Data Migration: Directus → Custom Backend + * + * Migrates data from Directus tables to the new schema. + * Run with: tsx src/scripts/data-migration.ts + * + * Environment variables: + * DATABASE_URL - PostgreSQL connection (same DB) + * OLD_UPLOAD_DIR - Path to Directus uploads (e.g. /old-uploads) + * NEW_UPLOAD_DIR - Path to new upload dir (e.g. 
/data/uploads) + */ + +import { Pool } from "pg"; +import fs from "fs"; +import path from "path"; + +const DATABASE_URL = process.env.DATABASE_URL || "postgresql://sexy:sexy@localhost:5432/sexy"; +const OLD_UPLOAD_DIR = process.env.OLD_UPLOAD_DIR || "/old-uploads"; +const NEW_UPLOAD_DIR = process.env.NEW_UPLOAD_DIR || "/data/uploads"; + +const pool = new Pool({ connectionString: DATABASE_URL }); + +async function query(sql: string, params: unknown[] = []) { + const client = await pool.connect(); + try { + return await client.query(sql, params); + } finally { + client.release(); + } +} + +function copyFile(src: string, dest: string) { + const dir = path.dirname(dest); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + if (fs.existsSync(src)) { + fs.copyFileSync(src, dest); + return true; + } + return false; +} + +async function migrateFiles() { + console.log("šŸ“ Migrating files..."); + const { rows } = await query( + `SELECT id, title, description, filename_disk, type, filesize, duration, uploaded_by, date_created + FROM directus_files`, + ); + + let migrated = 0; + let skipped = 0; + + for (const file of rows) { + // Check if already migrated + const existing = await query("SELECT id FROM files WHERE id = $1", [file.id]); + if (existing.rows.length > 0) { + skipped++; + continue; + } + + await query( + `INSERT INTO files (id, title, description, filename, mime_type, filesize, duration, uploaded_by, date_created) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9) + ON CONFLICT (id) DO NOTHING`, + [ + file.id, + file.title, + file.description, + file.filename_disk || `${file.id}`, + file.type, + file.filesize, + file.duration, + file.uploaded_by, + file.date_created, + ], + ); + + // Copy file to new location + const srcPath = path.join(OLD_UPLOAD_DIR, file.filename_disk || ""); + const destPath = path.join(NEW_UPLOAD_DIR, file.id, file.filename_disk || `${file.id}`); + const copied = copyFile(srcPath, destPath); + + if (!copied) { + 
console.warn(` āš ļø File not found on disk: ${file.filename_disk}`); + } + + migrated++; + } + + console.log(` āœ… Files: ${migrated} migrated, ${skipped} already existed`); +} + +async function migrateUsers() { + console.log("šŸ‘„ Migrating users..."); + const { rows } = await query( + `SELECT u.id, u.email, u.password, u.first_name, u.last_name, + u.description, u.avatar, u.date_created, + u.artist_name, u.slug, u.email_notifications_key, + r.name as role_name + FROM directus_users u + LEFT JOIN directus_roles r ON u.role = r.id + WHERE u.status = 'active'`, + ); + + let migrated = 0; + + for (const user of rows) { + const existing = await query("SELECT id FROM users WHERE id = $1", [user.id]); + if (existing.rows.length > 0) { + migrated++; + continue; + } + + const role = + user.role_name === "Model" + ? "model" + : user.role_name === "Administrator" + ? "admin" + : "viewer"; + + // Fetch tags from custom user fields if they exist + let tags: string[] = []; + try { + const tagsRes = await query("SELECT tags FROM directus_users WHERE id = $1", [user.id]); + if (tagsRes.rows[0]?.tags) { + tags = Array.isArray(tagsRes.rows[0].tags) + ? 
tagsRes.rows[0].tags + : JSON.parse(tagsRes.rows[0].tags || "[]"); + } + } catch {} + + await query( + `INSERT INTO users (id, email, password_hash, first_name, last_name, artist_name, slug, + description, tags, role, avatar, email_verified, date_created) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + ON CONFLICT (id) DO NOTHING`, + [ + user.id, + user.email, + user.password || "MIGRATED_NO_PASSWORD", + user.first_name, + user.last_name, + user.artist_name, + user.slug, + user.description, + JSON.stringify(tags), + role, + user.avatar, + true, // Assume existing users are verified + user.date_created, + ], + ); + migrated++; + } + + console.log(` āœ… Users: ${migrated} migrated`); +} + +async function migrateUserPhotos() { + console.log("šŸ–¼ļø Migrating user photos..."); + const { rows } = await query( + `SELECT directus_users_id as user_id, directus_files_id as file_id, sort + FROM junction_directus_users_files`, + ); + + let migrated = 0; + for (const row of rows) { + const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]); + const fileExists = await query("SELECT id FROM files WHERE id = $1", [row.file_id]); + if (!userExists.rows.length || !fileExists.rows.length) continue; + + await query( + `INSERT INTO user_photos (user_id, file_id, sort) VALUES ($1, $2, $3) + ON CONFLICT DO NOTHING`, + [row.user_id, row.file_id, row.sort || 0], + ); + migrated++; + } + + console.log(` āœ… User photos: ${migrated} migrated`); +} + +async function migrateArticles() { + console.log("šŸ“° Migrating articles..."); + const { rows } = await query( + `SELECT id, slug, title, excerpt, content, image, tags, publish_date, + author, category, featured, date_created, date_updated + FROM sexy_articles`, + ); + + let migrated = 0; + for (const article of rows) { + await query( + `INSERT INTO articles (id, slug, title, excerpt, content, image, tags, publish_date, + author, category, featured, date_created, date_updated) + VALUES ($1, $2, 
$3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13) + ON CONFLICT (id) DO NOTHING`, + [ + article.id, + article.slug, + article.title, + article.excerpt, + article.content, + article.image, + Array.isArray(article.tags) ? JSON.stringify(article.tags) : article.tags, + article.publish_date, + article.author, + article.category, + article.featured, + article.date_created, + article.date_updated, + ], + ); + migrated++; + } + + console.log(` āœ… Articles: ${migrated} migrated`); +} + +async function migrateVideos() { + console.log("šŸŽ¬ Migrating videos..."); + const { rows } = await query( + `SELECT id, slug, title, description, image, movie, tags, upload_date, + premium, featured, likes_count, plays_count + FROM sexy_videos`, + ); + + let migrated = 0; + for (const video of rows) { + await query( + `INSERT INTO videos (id, slug, title, description, image, movie, tags, upload_date, + premium, featured, likes_count, plays_count) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12) + ON CONFLICT (id) DO NOTHING`, + [ + video.id, + video.slug, + video.title, + video.description, + video.image, + video.movie, + Array.isArray(video.tags) ? 
JSON.stringify(video.tags) : video.tags, + video.upload_date, + video.premium, + video.featured, + video.likes_count || 0, + video.plays_count || 0, + ], + ); + migrated++; + } + + console.log(` āœ… Videos: ${migrated} migrated`); +} + +async function migrateVideoModels() { + console.log("šŸ”— Migrating video models..."); + const { rows } = await query( + `SELECT sexy_videos_id as video_id, directus_users_id as user_id + FROM sexy_videos_models`, + ); + + let migrated = 0; + for (const row of rows) { + const videoExists = await query("SELECT id FROM videos WHERE id = $1", [row.video_id]); + const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]); + if (!videoExists.rows.length || !userExists.rows.length) continue; + + await query( + `INSERT INTO video_models (video_id, user_id) VALUES ($1, $2) ON CONFLICT DO NOTHING`, + [row.video_id, row.user_id], + ); + migrated++; + } + + console.log(` āœ… Video models: ${migrated} migrated`); +} + +async function migrateVideoLikes() { + console.log("ā¤ļø Migrating video likes..."); + const { rows } = await query( + `SELECT id, video_id, user_id, date_created FROM sexy_video_likes`, + ); + + let migrated = 0; + for (const row of rows) { + await query( + `INSERT INTO video_likes (id, video_id, user_id, date_created) VALUES ($1, $2, $3, $4) + ON CONFLICT (id) DO NOTHING`, + [row.id, row.video_id, row.user_id, row.date_created], + ); + migrated++; + } + + console.log(` āœ… Video likes: ${migrated} migrated`); +} + +async function migrateVideoPlays() { + console.log("ā–¶ļø Migrating video plays..."); + const { rows } = await query( + `SELECT id, video_id, user_id, session_id, duration_watched, completed, date_created + FROM sexy_video_plays`, + ); + + let migrated = 0; + for (const row of rows) { + await query( + `INSERT INTO video_plays (id, video_id, user_id, session_id, duration_watched, completed, date_created) + VALUES ($1, $2, $3, $4, $5, $6, $7) + ON CONFLICT (id) DO NOTHING`, + [ + row.id, + 
row.video_id, + row.user_id, + row.session_id, + row.duration_watched, + row.completed, + row.date_created, + ], + ); + migrated++; + } + + console.log(` āœ… Video plays: ${migrated} migrated`); +} + +async function migrateRecordings() { + console.log("šŸŽ™ļø Migrating recordings..."); + const { rows } = await query( + `SELECT id, title, description, slug, duration, events, device_info, + user_created as user_id, status, tags, linked_video, featured, public, + original_recording_id, date_created, date_updated + FROM sexy_recordings`, + ); + + let migrated = 0; + for (const recording of rows) { + await query( + `INSERT INTO recordings (id, title, description, slug, duration, events, device_info, + user_id, status, tags, linked_video, featured, public, + original_recording_id, date_created, date_updated) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16) + ON CONFLICT (id) DO NOTHING`, + [ + recording.id, + recording.title, + recording.description, + recording.slug, + recording.duration, + typeof recording.events === "string" ? recording.events : JSON.stringify(recording.events), + typeof recording.device_info === "string" + ? recording.device_info + : JSON.stringify(recording.device_info), + recording.user_id, + recording.status, + Array.isArray(recording.tags) ? 
JSON.stringify(recording.tags) : recording.tags, + recording.linked_video, + recording.featured, + recording.public, + recording.original_recording_id, + recording.date_created, + recording.date_updated, + ], + ); + migrated++; + } + + console.log(` āœ… Recordings: ${migrated} migrated`); +} + +async function migrateRecordingPlays() { + console.log("ā–¶ļø Migrating recording plays..."); + const { rows } = await query( + `SELECT id, user_id, recording_id, duration_played, completed, date_created + FROM sexy_recording_plays`, + ); + + let migrated = 0; + for (const row of rows) { + await query( + `INSERT INTO recording_plays (id, recording_id, user_id, duration_played, completed, date_created) + VALUES ($1, $2, $3, $4, $5, $6) + ON CONFLICT (id) DO NOTHING`, + [row.id, row.recording_id, row.user_id, row.duration_played, row.completed, row.date_created], + ); + migrated++; + } + + console.log(` āœ… Recording plays: ${migrated} migrated`); +} + +async function migrateComments() { + console.log("šŸ’¬ Migrating comments..."); + const { rows } = await query( + `SELECT id, collection, item, comment, user_created as user_id, date_created + FROM directus_comments + WHERE collection IN ('sexy_videos', 'sexy_recordings')`, + ); + + let migrated = 0; + for (const row of rows) { + // Map collection names + const collection = row.collection === "sexy_videos" ? 
"videos" : "recordings"; + + await query( + `INSERT INTO comments (collection, item_id, comment, user_id, date_created) + VALUES ($1, $2, $3, $4, $5)`, + [collection, row.item, row.comment, row.user_id, row.date_created], + ); + migrated++; + } + + console.log(` āœ… Comments: ${migrated} migrated`); +} + +async function migrateAchievements() { + console.log("šŸ† Migrating achievements..."); + const { rows } = await query( + `SELECT id, code, name, description, icon, category, required_count, points_reward, status, sort + FROM sexy_achievements`, + ); + + let migrated = 0; + for (const row of rows) { + await query( + `INSERT INTO achievements (id, code, name, description, icon, category, required_count, points_reward, status, sort) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10) + ON CONFLICT (id) DO NOTHING`, + [ + row.id, + row.code, + row.name, + row.description, + row.icon, + row.category, + row.required_count, + row.points_reward, + row.status, + row.sort, + ], + ); + migrated++; + } + + console.log(` āœ… Achievements: ${migrated} migrated`); +} + +async function migrateUserAchievements() { + console.log("šŸŽ–ļø Migrating user achievements..."); + const { rows } = await query( + `SELECT user_id, achievement_id, progress, date_unlocked FROM sexy_user_achievements`, + ); + + let migrated = 0; + for (const row of rows) { + const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]); + const achievementExists = await query("SELECT id FROM achievements WHERE id = $1", [ + row.achievement_id, + ]); + if (!userExists.rows.length || !achievementExists.rows.length) continue; + + await query( + `INSERT INTO user_achievements (user_id, achievement_id, progress, date_unlocked) + VALUES ($1, $2, $3, $4) + ON CONFLICT (user_id, achievement_id) DO NOTHING`, + [row.user_id, row.achievement_id, row.progress, row.date_unlocked], + ); + migrated++; + } + + console.log(` āœ… User achievements: ${migrated} migrated`); +} + +async function 
migrateUserPoints() { + console.log("šŸ’Ž Migrating user points..."); + const { rows } = await query( + `SELECT user_id, action, points, recording_id, date_created FROM sexy_user_points`, + ); + + let migrated = 0; + for (const row of rows) { + const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]); + if (!userExists.rows.length) continue; + + await query( + `INSERT INTO user_points (user_id, action, points, recording_id, date_created) + VALUES ($1, $2, $3, $4, $5)`, + [row.user_id, row.action, row.points, row.recording_id, row.date_created], + ); + migrated++; + } + + console.log(` āœ… User points: ${migrated} migrated`); +} + +async function migrateUserStats() { + console.log("šŸ“Š Migrating user stats..."); + const { rows } = await query( + `SELECT user_id, total_raw_points, total_weighted_points, recordings_count, + playbacks_count, comments_count, achievements_count, last_updated + FROM sexy_user_stats`, + ); + + let migrated = 0; + for (const row of rows) { + const userExists = await query("SELECT id FROM users WHERE id = $1", [row.user_id]); + if (!userExists.rows.length) continue; + + await query( + `INSERT INTO user_stats (user_id, total_raw_points, total_weighted_points, recordings_count, + playbacks_count, comments_count, achievements_count, last_updated) + VALUES ($1, $2, $3, $4, $5, $6, $7, $8) + ON CONFLICT (user_id) DO NOTHING`, + [ + row.user_id, + row.total_raw_points, + row.total_weighted_points, + row.recordings_count, + row.playbacks_count, + row.comments_count, + row.achievements_count, + row.last_updated, + ], + ); + migrated++; + } + + console.log(` āœ… User stats: ${migrated} migrated`); +} + +async function main() { + console.log("šŸš€ Starting data migration from Directus to custom backend...\n"); + + try { + // Verify connection + await query("SELECT 1"); + console.log("āœ… Database connected\n"); + + // Migration order respects FK dependencies + await migrateFiles(); + await migrateUsers(); + await 
migrateUserPhotos(); + await migrateArticles(); + await migrateVideos(); + await migrateVideoModels(); + await migrateVideoLikes(); + await migrateVideoPlays(); + await migrateRecordings(); + await migrateRecordingPlays(); + await migrateComments(); + await migrateAchievements(); + await migrateUserAchievements(); + await migrateUserPoints(); + await migrateUserStats(); + + console.log("\nšŸŽ‰ Migration complete!"); + } catch (error) { + console.error("āŒ Migration failed:", error); + process.exit(1); + } finally { + await pool.end(); + } +} + +main(); diff --git a/packages/backend/tsconfig.json b/packages/backend/tsconfig.json new file mode 100644 index 0000000..a11f684 --- /dev/null +++ b/packages/backend/tsconfig.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "target": "ES2022", + "module": "NodeNext", + "moduleResolution": "NodeNext", + "lib": ["ES2022"], + "outDir": "./dist", + "rootDir": "./src", + "strict": true, + "skipLibCheck": true, + "esModuleInterop": true, + "experimentalDecorators": true, + "emitDecoratorMetadata": true, + "declaration": true, + "declarationMap": true, + "sourceMap": true + }, + "include": ["src/**/*"], + "exclude": ["node_modules", "dist"] +} diff --git a/packages/frontend/package.json b/packages/frontend/package.json index b8b9abe..00e3398 100644 --- a/packages/frontend/package.json +++ b/packages/frontend/package.json @@ -40,8 +40,9 @@ "vite-plugin-wasm": "3.5.0" }, "dependencies": { - "@directus/sdk": "^21.1.0", "@sexy.pivoine.art/buttplug": "workspace:*", + "graphql": "^16.11.0", + "graphql-request": "^7.1.2", "javascript-time-ago": "^2.6.4", "media-chrome": "^4.18.0", "svelte-i18n": "^4.0.1" diff --git a/packages/frontend/src/hooks.server.ts b/packages/frontend/src/hooks.server.ts index 74c42e8..b0b3456 100644 --- a/packages/frontend/src/hooks.server.ts +++ b/packages/frontend/src/hooks.server.ts @@ -30,7 +30,7 @@ export const handle: Handle = async ({ event, resolve }) => { }); // Handle authentication - const token = 
cookies.get("directus_session_token"); + const token = cookies.get("session_token"); if (token) { try { @@ -42,7 +42,7 @@ export const handle: Handle = async ({ event, resolve }) => { userId: locals.authStatus.user?.id, context: { email: locals.authStatus.user?.email, - role: locals.authStatus.user?.role?.name, + role: locals.authStatus.user?.role, }, }); } else { diff --git a/packages/frontend/src/lib/api.ts b/packages/frontend/src/lib/api.ts new file mode 100644 index 0000000..a4b81da --- /dev/null +++ b/packages/frontend/src/lib/api.ts @@ -0,0 +1,25 @@ +import { GraphQLClient } from "graphql-request"; +import { env } from "$env/dynamic/public"; +import type { CurrentUser } from "./types"; + +export const apiUrl = env.PUBLIC_API_URL || "http://localhost:3000/api"; + +export const getGraphQLClient = (fetchFn?: typeof globalThis.fetch) => + new GraphQLClient(`${apiUrl}/graphql`, { + credentials: "include", + fetch: fetchFn || globalThis.fetch, + }); + +export const getAssetUrl = ( + id: string, + transform?: "mini" | "thumbnail" | "preview" | "medium" | "banner", +) => { + if (!id) { + return null; + } + return `${apiUrl}/assets/${id}${transform ? "?transform=" + transform : ""}`; +}; + +export const isModel = (user: CurrentUser) => { + return user.role === "model"; +}; diff --git a/packages/frontend/src/lib/directus.ts b/packages/frontend/src/lib/directus.ts index 3d6fb6f..3d5fe32 100644 --- a/packages/frontend/src/lib/directus.ts +++ b/packages/frontend/src/lib/directus.ts @@ -1,35 +1,3 @@ -import { authentication, createDirectus, rest } from "@directus/sdk"; -import { env } from "$env/dynamic/public"; -import type { CurrentUser } from "./types"; - -export const directusApiUrl = env.PUBLIC_API_URL || "http://localhost:3000/api"; - -export const getDirectusInstance = (fetch?: typeof globalThis.fetch) => { - const options: { globals?: { fetch: typeof globalThis.fetch } } = fetch - ? 
{ globals: { fetch } } - : {}; - const directus = createDirectus(directusApiUrl, options) - .with(rest()) - .with(authentication("session")); - return directus; -}; - -export const getAssetUrl = ( - id: string, - transform?: "mini" | "thumbnail" | "preview" | "medium" | "banner", -) => { - if (!id) { - return null; - } - return `${directusApiUrl}/assets/${id}${transform ? "?key=" + transform : ""}`; -}; - -export const isModel = (user: CurrentUser) => { - if (user.role.name === "Model") { - return true; - } - if (user.policies.find((p) => p.policy.name === "Model")) { - return true; - } - return false; -}; +// Re-export from api.ts for backwards compatibility +// All components that import from $lib/directus continue to work +export { apiUrl as directusApiUrl, getAssetUrl, isModel, getGraphQLClient as getDirectusInstance } from "./api.js"; diff --git a/packages/frontend/src/lib/services.ts b/packages/frontend/src/lib/services.ts index 3d9680a..bdf6a33 100644 --- a/packages/frontend/src/lib/services.ts +++ b/packages/frontend/src/lib/services.ts @@ -1,22 +1,18 @@ -import { getDirectusInstance, directusApiUrl } from "$lib/directus"; -import { - readItems, - registerUser, - updateMe, - readMe, - registerUserVerify, - passwordRequest, - passwordReset, - customEndpoint, - readFolders, - deleteFile, - uploadFiles, - createComment, - readComments, - aggregate, -} from "@directus/sdk"; -import type { Analytics, Article, Model, Recording, Stats, User, Video, VideoLikeStatus, VideoLikeResponse, VideoPlayResponse } from "$lib/types"; -import { env } from "$env/dynamic/public"; +import { gql, GraphQLClient } from "graphql-request"; +import { apiUrl, getGraphQLClient } from "$lib/api"; +import type { + Analytics, + Article, + CurrentUser, + Model, + Recording, + Stats, + User, + Video, + VideoLikeStatus, + VideoLikeResponse, + VideoPlayResponse, +} from "$lib/types"; import { logger } from "$lib/logger"; // Helper to log API calls @@ -44,28 +40,36 @@ async function 
loggedApiCall( } } -const userFields = [ - "*", - { - avatar: ["*"], - policies: ["*", { policy: ["name", "id"] }], - role: ["*", { policies: [{ policy: ["name", "id"] }] }], - }, -]; +// For server-side auth checks: forward cookie header manually +function getAuthClient(token: string, fetchFn?: typeof globalThis.fetch) { + return new GraphQLClient(`${apiUrl}/graphql`, { + fetch: fetchFn || globalThis.fetch, + headers: { cookie: `session_token=${token}` }, + }); +} -export async function isAuthenticated(token: string) { +// ─── Auth ──────────────────────────────────────────────────────────────────── + +const ME_QUERY = gql` + query Me { + me { + id email first_name last_name artist_name slug description tags + role avatar banner email_verified date_created + } + } +`; + +export async function isAuthenticated(token: string, fetchFn?: typeof globalThis.fetch) { return loggedApiCall( "isAuthenticated", async () => { try { - const directus = getDirectusInstance(fetch); - directus.setToken(token); - const user = await directus.request( - readMe({ - fields: userFields, - }), - ); - return { authenticated: true, user }; + const client = getAuthClient(token, fetchFn); + const data = await client.request<{ me: CurrentUser | null }>(ME_QUERY); + if (data.me) { + return { authenticated: true, user: data.me }; + } + return { authenticated: false }; } catch { return { authenticated: false }; } @@ -74,6 +78,43 @@ export async function isAuthenticated(token: string) { ); } +const LOGIN_MUTATION = gql` + mutation Login($email: String!, $password: String!) 
{ + login(email: $email, password: $password) { + id email first_name last_name artist_name slug description tags + role avatar banner email_verified date_created + } + } +`; + +export async function login(email: string, password: string) { + return loggedApiCall( + "login", + async () => { + const data = await getGraphQLClient().request<{ login: CurrentUser }>(LOGIN_MUTATION, { + email, + password, + }); + return data.login; + }, + { email }, + ); +} + +const LOGOUT_MUTATION = gql`mutation Logout { logout }`; + +export async function logout() { + return loggedApiCall("logout", async () => { + await getGraphQLClient().request(LOGOUT_MUTATION); + }); +} + +const REGISTER_MUTATION = gql` + mutation Register($email: String!, $password: String!, $firstName: String!, $lastName: String!) { + register(email: $email, password: $password, firstName: $firstName, lastName: $lastName) + } +`; + export async function register( email: string, password: string, @@ -83,135 +124,138 @@ export async function register( return loggedApiCall( "register", async () => { - const directus = getDirectusInstance(fetch); - return directus.request( - registerUser(email, password, { - verification_url: `${env.PUBLIC_URL || "http://localhost:3000"}/signup/verify`, - first_name: firstName, - last_name: lastName, - }), - ); + await getGraphQLClient().request(REGISTER_MUTATION, { + email, + password, + firstName, + lastName, + }); }, { email, firstName, lastName }, ); } -export async function verify(token: string, fetch?: typeof globalThis.fetch) { +const VERIFY_EMAIL_MUTATION = gql` + mutation VerifyEmail($token: String!) { + verifyEmail(token: $token) + } +`; + +export async function verify(token: string, fetchFn?: typeof globalThis.fetch) { return loggedApiCall( "verify", async () => { - const directus = fetch - ? 
getDirectusInstance((args) => fetch(args, { redirect: "manual" })) - : getDirectusInstance(fetch); - return directus.request(registerUserVerify(token)); + await getGraphQLClient(fetchFn).request(VERIFY_EMAIL_MUTATION, { token }); }, { hasToken: !!token }, ); } -export async function login(email: string, password: string) { - return loggedApiCall( - "login", - async () => { - const directus = getDirectusInstance(fetch); - return directus.login({ email, password }); - }, - { email }, - ); -} - -export async function logout() { - return loggedApiCall("logout", async () => { - const directus = getDirectusInstance(fetch); - return directus.logout(); - }); -} +const REQUEST_PASSWORD_MUTATION = gql` + mutation RequestPasswordReset($email: String!) { + requestPasswordReset(email: $email) + } +`; export async function requestPassword(email: string) { return loggedApiCall( "requestPassword", async () => { - const directus = getDirectusInstance(fetch); - return directus.request( - passwordRequest(email, `${env.PUBLIC_URL || "http://localhost:3000"}/password/reset`), - ); + await getGraphQLClient().request(REQUEST_PASSWORD_MUTATION, { email }); }, { email }, ); } +const RESET_PASSWORD_MUTATION = gql` + mutation ResetPassword($token: String!, $newPassword: String!) 
{ + resetPassword(token: $token, newPassword: $newPassword) + } +`; + export async function resetPassword(token: string, password: string) { return loggedApiCall( "resetPassword", async () => { - const directus = getDirectusInstance(fetch); - return directus.request(passwordReset(token, password)); + await getGraphQLClient().request(RESET_PASSWORD_MUTATION, { + token, + newPassword: password, + }); }, { hasToken: !!token }, ); } -export async function getArticles(fetch?: typeof globalThis.fetch) { +// ─── Articles ──────────────────────────────────────────────────────────────── + +const ARTICLES_QUERY = gql` + query GetArticles { + articles { + id slug title excerpt content image tags publish_date category featured + author { first_name last_name avatar description } + } + } +`; + +export async function getArticles(fetchFn?: typeof globalThis.fetch) { return loggedApiCall("getArticles", async () => { - const directus = getDirectusInstance(fetch); - return directus.request( - customEndpoint({ - method: "GET", - path: "/sexy/articles", - }), - ); + const data = await getGraphQLClient(fetchFn).request<{ articles: Article[] }>(ARTICLES_QUERY); + return data.articles; }); } -export async function getArticleBySlug( - slug: string, - fetch?: typeof globalThis.fetch, -) { +const ARTICLE_BY_SLUG_QUERY = gql` + query GetArticleBySlug($slug: String!) 
{ + article(slug: $slug) { + id slug title excerpt content image tags publish_date category featured + author { first_name last_name avatar description } + } + } +`; + +export async function getArticleBySlug(slug: string, fetchFn?: typeof globalThis.fetch) { return loggedApiCall( "getArticleBySlug", async () => { - const directus = getDirectusInstance(fetch); - return directus - .request( - readItems("sexy_articles", { - fields: ["*", "author.*"], - filter: { slug: { _eq: slug } }, - }), - ) - .then((articles) => { - if (articles.length === 0) { - throw new Error("Article not found"); - } - return articles[0]; - }); + const data = await getGraphQLClient(fetchFn).request<{ article: Article | null }>( + ARTICLE_BY_SLUG_QUERY, + { slug }, + ); + if (!data.article) throw new Error("Article not found"); + return data.article; }, { slug }, ); } -export async function getVideos(fetch?: typeof globalThis.fetch) { +// ─── Videos ────────────────────────────────────────────────────────────────── + +const VIDEOS_QUERY = gql` + query GetVideos($modelId: String, $featured: Boolean, $limit: Int) { + videos(modelId: $modelId, featured: $featured, limit: $limit) { + id slug title description image movie tags upload_date premium featured + likes_count plays_count + models { id artist_name slug avatar } + movie_file { id filename mime_type duration } + } + } +`; + +export async function getVideos(fetchFn?: typeof globalThis.fetch) { return loggedApiCall("getVideos", async () => { - const directus = getDirectusInstance(fetch); - return directus.request( - customEndpoint({ - method: "GET", - path: "/sexy/videos", - }), - ); + const data = await getGraphQLClient(fetchFn).request<{ videos: Video[] }>(VIDEOS_QUERY); + return data.videos; }); } -export async function getVideosForModel(id, fetch?: typeof globalThis.fetch) { +export async function getVideosForModel(id: string, fetchFn?: typeof globalThis.fetch) { return loggedApiCall( "getVideosForModel", async () => { - const directus = 
getDirectusInstance(fetch); - return directus.request( - customEndpoint({ - method: "GET", - path: `/sexy/videos?model_id=${id}`, - }), - ); + const data = await getGraphQLClient(fetchFn).request<{ videos: Video[] }>(VIDEOS_QUERY, { + modelId: id, + }); + return data.videos; }, { modelId: id }, ); @@ -224,45 +268,57 @@ export async function getFeaturedVideos( return loggedApiCall( "getFeaturedVideos", async () => { - const url = `${env.PUBLIC_URL}/api/sexy/videos?featured=true&limit=${limit}`; - const response = await fetchFn(url); - if (!response.ok) { - throw new Error(`Failed to fetch featured videos: ${response.statusText}`); - } - return (await response.json()) as Video[]; + const data = await getGraphQLClient(fetchFn).request<{ videos: Video[] }>(VIDEOS_QUERY, { + featured: true, + limit, + }); + return data.videos; }, { limit }, ); } -export async function getVideoBySlug( - slug: string, - fetch?: typeof globalThis.fetch, -) { +const VIDEO_BY_SLUG_QUERY = gql` + query GetVideoBySlug($slug: String!) { + video(slug: $slug) { + id slug title description image movie tags upload_date premium featured + likes_count plays_count + models { id artist_name slug avatar } + movie_file { id filename mime_type duration } + } + } +`; + +export async function getVideoBySlug(slug: string, fetchFn?: typeof globalThis.fetch) { return loggedApiCall( "getVideoBySlug", async () => { - const directus = getDirectusInstance(fetch); - return directus.request