commit 971ef5426d9a02a8d00ba29345ceb24ce34b23bf
Author: Sebastian Krüger
Date: Wed Nov 5 03:32:14 2025 +0100

    feat: initial commit - Scrapyd UI web interface

    - Next.js 16.0.1 + React 19.2.0 + Tailwind CSS 4.1.16
    - Complete Scrapyd API integration (all 12 endpoints)
    - Dashboard with real-time job monitoring
    - Projects management (upload, list, delete)
    - Spiders management with scheduling
    - Jobs monitoring with filtering and cancellation
    - System status monitoring
    - Dark/light theme toggle with next-themes
    - Server-side authentication via environment variables
    - Docker deployment with multi-stage builds
    - GitHub Actions CI/CD workflow

    🤖 Generated with [Claude Code](https://claude.com/claude-code)

    Co-Authored-By: Claude

diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 0000000..b36a151
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,28 @@
+# Node
+node_modules
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+pnpm-debug.log*
+
+# Next.js
+.next
+out
+
+# Env
+.env*.local
+.env
+
+# IDE
+.vscode
+.idea
+.claude
+
+# Git
+.git
+.gitignore
+
+# Misc
+README.md
+*.md
+.DS_Store
diff --git a/.github/workflows/docker-build.yml b/.github/workflows/docker-build.yml
new file mode 100644
index 0000000..a8eb9f3
--- /dev/null
+++ b/.github/workflows/docker-build.yml
@@ -0,0 +1,62 @@
+name: Build and Push Docker Image
+
+on:
+  push:
+    branches:
+      - main
+    tags:
+      - 'v*.*.*'
+  pull_request:
+    branches:
+      - main
+
+env:
+  REGISTRY: ghcr.io
+  IMAGE_NAME: ${{ github.repository }}
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Log in to GitHub Container Registry
+        if: github.event_name != 'pull_request'
+        uses: docker/login-action@v3
+        with:
+          registry: ${{ env.REGISTRY }}
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Extract metadata (tags, labels)
+        id: meta
+        uses: docker/metadata-action@v5
+        with:
+          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
+          tags: |
+            type=ref,event=branch
+            type=ref,event=pr
+            type=semver,pattern={{version}}
+            type=semver,pattern={{major}}.{{minor}}
+            type=semver,pattern={{major}}
+            type=sha
+
+      - name: Build and push Docker image
+        uses: docker/build-push-action@v5
+        with:
+          context: .
+          file: ./Dockerfile
+          push: ${{ github.event_name != 'pull_request' }}
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+          platforms: linux/amd64,linux/arm64
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5ef6a52
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,41 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.*
+.yarn/*
+!.yarn/patches
+!.yarn/plugins
+!.yarn/releases
+!.yarn/versions
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.pnpm-debug.log*
+
+# env files (can opt-in for committing if needed)
+.env*
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..47fb608
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,51 @@
+# Multi-stage Dockerfile for Next.js production build
+
+# Stage 1: Dependencies
+FROM node:22-alpine AS deps
+RUN apk add --no-cache libc6-compat
+RUN corepack enable && corepack prepare pnpm@latest --activate
+WORKDIR /app
+
+# Copy package files
+COPY package.json pnpm-lock.yaml* ./
+RUN pnpm install --frozen-lockfile
+
+# Stage 2: Builder
+FROM node:22-alpine AS builder
+RUN corepack enable && corepack prepare pnpm@latest --activate
+WORKDIR /app
+
+# Copy dependencies from deps stage
+COPY --from=deps /app/node_modules ./node_modules
+COPY . .
+
+# Set environment variables for build
+ENV NEXT_TELEMETRY_DISABLED=1
+
+# Build Next.js
+RUN pnpm build
+
+# Stage 3: Runner
+FROM node:22-alpine AS runner
+WORKDIR /app
+
+ENV NODE_ENV=production
+ENV NEXT_TELEMETRY_DISABLED=1
+
+# Create non-root user
+RUN addgroup --system --gid 1001 nodejs
+RUN adduser --system --uid 1001 nextjs
+
+# Copy standalone build
+COPY --from=builder /app/public ./public
+COPY --from=builder --chown=nextjs:nodejs /app/.next/standalone ./
+COPY --from=builder --chown=nextjs:nodejs /app/.next/static ./.next/static
+
+USER nextjs
+
+EXPOSE 3000
+
+ENV PORT=3000
+ENV HOSTNAME="0.0.0.0"
+
+CMD ["node", "server.js"]
diff --git a/Dockerfile.dev b/Dockerfile.dev
new file mode 100644
index 0000000..40d353b
--- /dev/null
+++ b/Dockerfile.dev
@@ -0,0 +1,20 @@
+# Development Dockerfile with hot reload
+
+FROM node:22-alpine
+
+RUN corepack enable && corepack prepare pnpm@latest --activate
+
+WORKDIR /app
+
+# Copy package files
+COPY package.json pnpm-lock.yaml* ./
+
+# Install dependencies
+RUN pnpm install
+
+# Copy application code
+COPY . .
+
+EXPOSE 3000
+
+CMD ["pnpm", "dev"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..44c986a
--- /dev/null
+++ b/README.md
@@ -0,0 +1,283 @@
+# Scrapy UI
+
+A modern, stylish web interface for managing and monitoring [Scrapyd](https://scrapyd.readthedocs.io/) instances. Built with Next.js 15, shadcn/ui, and Tailwind CSS 4.
+ +## Features + +- **Real-time Monitoring** - Live dashboard with job statistics and system status +- **Project Management** - Upload, list, and delete Scrapy projects and versions +- **Spider Management** - Browse spiders and schedule jobs with custom arguments +- **Job Control** - Monitor running/pending/finished jobs with filtering and cancellation +- **System Status** - View Scrapyd daemon health and metrics +- **Modern UI** - Clean, responsive design with dark/light theme support +- **Secure** - Server-side authentication with environment variables +- **Docker Ready** - Multi-stage builds for production deployment + +## Tech Stack + +- **Next.js 15** (App Router, Server Components) +- **React 19** with Server Actions +- **TypeScript** for type safety +- **Tailwind CSS 4** for styling +- **shadcn/ui** for UI components +- **TanStack Query** for data fetching +- **Zod** for runtime validation +- **Docker** for containerization + +## Prerequisites + +- Node.js 22+ or Docker +- A running Scrapyd instance +- Basic auth credentials for Scrapyd + +## Quick Start + +### 1. Clone the repository + +```bash +git clone +cd scrapy-ui +``` + +### 2. Configure environment variables + +Copy `.env.example` to `.env.local` and update with your credentials: + +```bash +cp .env.example .env.local +``` + +Edit `.env.local`: + +```env +SCRAPYD_URL=https://scrapy.pivoine.art +SCRAPYD_USERNAME=your_username +SCRAPYD_PASSWORD=your_password +``` + +### 3. Run locally + +#### Using pnpm: + +```bash +pnpm install +pnpm dev +``` + +#### Using Docker Compose (development): + +```bash +docker-compose -f docker-compose.dev.yml up +``` + +#### Using Docker Compose (production): + +```bash +docker-compose up -d +``` + +Visit [http://localhost:3000/ui](http://localhost:3000/ui) + +## Project Structure + +``` +scrapy-ui/ +β”œβ”€β”€ app/ # Next.js App Router +β”‚ β”œβ”€β”€ (dashboard)/ # Dashboard route group +β”‚ β”‚ β”œβ”€β”€ page.tsx # Dashboard (/) +β”‚ β”‚ β”œβ”€β”€ projects/ # Projects management +β”‚ β”‚ β”œβ”€β”€ spiders/ # Spiders listing & scheduling +β”‚ β”‚ β”œβ”€β”€ jobs/ # Jobs monitoring & control +β”‚ β”‚ └── system/ # System status +β”‚ β”œβ”€β”€ api/scrapyd/ # API routes (server-side) +β”‚ β”‚ β”œβ”€β”€ daemon/ # GET /api/scrapyd/daemon +β”‚ β”‚ β”œβ”€β”€ projects/ # GET/DELETE /api/scrapyd/projects +β”‚ β”‚ β”œβ”€β”€ versions/ # GET/POST/DELETE /api/scrapyd/versions +β”‚ β”‚ β”œβ”€β”€ spiders/ # GET /api/scrapyd/spiders +β”‚ β”‚ └── jobs/ # GET/POST/DELETE /api/scrapyd/jobs +β”‚ └── layout.tsx # Root layout with theme provider +β”œβ”€β”€ components/ # React components +β”‚ β”œβ”€β”€ ui/ # shadcn/ui components +β”‚ β”œβ”€β”€ sidebar.tsx # Navigation sidebar +β”‚ β”œβ”€β”€ header.tsx # Page header +β”‚ └── theme-toggle.tsx # Dark/light mode toggle +β”œβ”€β”€ lib/ # Utilities & API client +β”‚ β”œβ”€β”€ scrapyd-client.ts # Scrapyd API wrapper +β”‚ β”œβ”€β”€ types.ts # TypeScript types & Zod schemas +β”‚ └── utils.ts # Helper functions +β”œβ”€β”€ Dockerfile # Production build +β”œβ”€β”€ Dockerfile.dev # Development build +β”œβ”€β”€ docker-compose.yml # Production deployment +└── docker-compose.dev.yml # Development deployment +``` + +## API Endpoints + +All Scrapyd endpoints are proxied through Next.js API routes with server-side authentication: + +| Endpoint | Method | Description | +|----------|--------|-------------| +| `/api/scrapyd/daemon` | GET | Daemon status | +| `/api/scrapyd/projects` | GET | List projects | +| `/api/scrapyd/projects` | DELETE | Delete project | +| `/api/scrapyd/versions` | GET 
| List versions | +| `/api/scrapyd/versions` | POST | Upload version | +| `/api/scrapyd/versions` | DELETE | Delete version | +| `/api/scrapyd/spiders` | GET | List spiders | +| `/api/scrapyd/jobs` | GET | List jobs | +| `/api/scrapyd/jobs` | POST | Schedule job | +| `/api/scrapyd/jobs` | DELETE | Cancel job | + +## Deployment + +### Docker + +#### Build production image: + +```bash +docker build -t scrapy-ui:latest . +``` + +#### Run container: + +```bash +docker run -d \ + -p 3000:3000 \ + -e SCRAPYD_URL=https://scrapy.pivoine.art \ + -e SCRAPYD_USERNAME=your_username \ + -e SCRAPYD_PASSWORD=your_password \ + --name scrapy-ui \ + scrapy-ui:latest +``` + +### GitHub Actions + +The project includes a GitHub Actions workflow (`.github/workflows/docker-build.yml`) that automatically builds and pushes Docker images to GitHub Container Registry on push to `main` or on tagged releases. + +To use it: + +1. Ensure GitHub Actions has write permissions to packages +2. Push code to trigger the workflow +3. Images will be available at `ghcr.io//scrapy-ui` + +### Reverse Proxy (Nginx) + +To deploy under `/ui` path with Nginx: + +```nginx +location /ui/ { + proxy_pass http://localhost:3000/ui/; + proxy_http_version 1.1; + proxy_set_header Upgrade $http_upgrade; + proxy_set_header Connection 'upgrade'; + proxy_set_header Host $host; + proxy_cache_bypass $http_upgrade; +} +``` + +## Configuration + +### Environment Variables + +| Variable | Description | Required | Default | +|----------|-------------|----------|---------| +| `SCRAPYD_URL` | Scrapyd base URL | Yes | `https://scrapy.pivoine.art` | +| `SCRAPYD_USERNAME` | Basic auth username | Yes | - | +| `SCRAPYD_PASSWORD` | Basic auth password | Yes | - | +| `NODE_ENV` | Environment mode | No | `production` | + +### Next.js Configuration + +The `next.config.ts` includes: +- `basePath: "/ui"` - Serves app under `/ui` path +- `output: "standalone"` - Optimized Docker builds +- Optimized imports for `lucide-react` + +## Development + +### Install dependencies: + +```bash +pnpm install +``` + +### Run development server: + +```bash +pnpm dev +``` + +### Build for production: + +```bash +pnpm build +pnpm start +``` + +### Lint code: + +```bash +pnpm lint +``` + +### Add shadcn/ui components: + +```bash +pnpm dlx shadcn@latest add +``` + +## Features in Detail + +### Dashboard +- Real-time job statistics (running, pending, finished) +- System health indicators +- Quick project overview +- Auto-refresh every 30 seconds + +### Projects +- List all Scrapy projects +- Upload new versions (.egg files) +- View version history +- Delete projects/versions +- Drag & drop file upload + +### Spiders +- Browse spiders by project +- Schedule jobs with custom arguments +- JSON argument validation +- Quick schedule dialog + +### Jobs +- Filter by status (pending/running/finished) +- Real-time status updates (5-second refresh) +- Cancel running/pending jobs +- View job logs and items +- Detailed job information + +### System +- Daemon status monitoring +- Job queue statistics +- Environment information +- Auto-refresh every 10 seconds + +## Security + +- **Server-side authentication**: Credentials are stored in environment variables and never exposed to the client +- **API proxy**: All Scrapyd requests go through Next.js API routes +- **Basic auth**: Automatic injection of credentials in request headers +- **No client-side secrets**: Zero credential exposure in browser + +## Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. 
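+
+As a point of reference for the Configuration section above, the three `next.config.ts` settings it lists could be expressed roughly as follows. This is a minimal sketch, not the exact file in this repository; in particular, placing `optimizePackageImports` under `experimental` is an assumption about where the lucide-react import optimization is configured:
+
+```typescript
+// next.config.ts — illustrative sketch of the options described above.
+import type { NextConfig } from "next";
+
+const nextConfig: NextConfig = {
+  basePath: "/ui",      // serve the app under the /ui path
+  output: "standalone", // self-contained server output for the Docker image
+  experimental: {
+    // Assumed placement of the optimized lucide-react imports.
+    optimizePackageImports: ["lucide-react"],
+  },
+};
+
+export default nextConfig;
+```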
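+
+The server-side credential handling described in the Security section can be pictured with a sketch like the one below. The file path and handler body are hypothetical (the actual routes under `app/api/scrapyd/` are not reproduced here); `daemonstatus.json` is the standard Scrapyd daemon status endpoint:
+
+```typescript
+// Hypothetical app/api/scrapyd/daemon/route.ts — sketch only.
+import { NextResponse } from "next/server";
+
+export async function GET() {
+  // Credentials come from server-side environment variables and never reach the browser.
+  const auth = Buffer.from(
+    `${process.env.SCRAPYD_USERNAME}:${process.env.SCRAPYD_PASSWORD}`
+  ).toString("base64");
+
+  const res = await fetch(`${process.env.SCRAPYD_URL}/daemonstatus.json`, {
+    headers: { Authorization: `Basic ${auth}` },
+    cache: "no-store", // always query Scrapyd for fresh status
+  });
+
+  if (!res.ok) {
+    return NextResponse.json(
+      { error: `Scrapyd responded with ${res.status}` },
+      { status: res.status }
+    );
+  }
+
+  return NextResponse.json(await res.json());
+}
+```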
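+
+And as a usage illustration of the proxied jobs endpoint from the API table above — mirroring what the Spiders page does client-side — scheduling a run could look like this (the project and spider names and the `start_url` argument are placeholders):
+
+```typescript
+// Hypothetical client-side call to schedule a spider via the proxy.
+async function scheduleSpider() {
+  const res = await fetch("/ui/api/scrapyd/jobs", {
+    method: "POST",
+    headers: { "Content-Type": "application/json" },
+    body: JSON.stringify({
+      project: "my_project",                      // placeholder project name
+      spider: "my_spider",                        // placeholder spider name
+      args: { start_url: "https://example.com" }, // optional spider arguments
+    }),
+  });
+  if (!res.ok) throw new Error("Failed to schedule job");
+  return res.json(); // expected to include the Scrapyd job id
+}
+```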
+ +## License + +MIT License - feel free to use this project for personal or commercial purposes. + +## Acknowledgments + +- Built with [Next.js](https://nextjs.org/) +- UI components from [shadcn/ui](https://ui.shadcn.com/) +- Icons from [Lucide](https://lucide.dev/) +- Designed for [Scrapyd](https://scrapyd.readthedocs.io/) diff --git a/app/(dashboard)/jobs/page.tsx b/app/(dashboard)/jobs/page.tsx new file mode 100644 index 0000000..97bd759 --- /dev/null +++ b/app/(dashboard)/jobs/page.tsx @@ -0,0 +1,383 @@ +"use client"; + +import { useState } from "react"; +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { Header } from "@/components/header"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, +} from "@/components/ui/dialog"; +import { + BriefcaseBusiness, + Clock, + PlayCircle, + CheckCircle2, + XCircle, + ExternalLink, + AlertCircle, +} from "lucide-react"; +import { ListProjects, ListJobs, Job } from "@/lib/types"; +import { format } from "date-fns"; + +export default function JobsPage() { + const queryClient = useQueryClient(); + const [selectedProject, setSelectedProject] = useState(""); + const [statusFilter, setStatusFilter] = useState("all"); + const [cancelDialogOpen, setCancelDialogOpen] = useState(false); + const [selectedJob, setSelectedJob] = useState(null); + + // Fetch projects + const { data: projects, isLoading: isProjectsLoading } = useQuery({ + queryKey: ["projects"], + queryFn: async (): Promise => { + const res = await fetch("/ui/api/scrapyd/projects"); + if (!res.ok) throw new Error("Failed to fetch projects"); + return res.json(); + }, + }); + + // Fetch jobs for selected project + const { data: jobs, isLoading: isJobsLoading } = useQuery({ + queryKey: ["jobs", selectedProject], + queryFn: async (): Promise => { + const res = await fetch(`/ui/api/scrapyd/jobs?project=${selectedProject}`); + if (!res.ok) throw new Error("Failed to fetch jobs"); + return res.json(); + }, + enabled: !!selectedProject, + refetchInterval: 5000, // Refresh every 5 seconds for real-time updates + }); + + // Cancel job mutation + const cancelJobMutation = useMutation({ + mutationFn: async (data: { project: string; job: string }) => { + const res = await fetch("/ui/api/scrapyd/jobs", { + method: "DELETE", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(data), + }); + if (!res.ok) throw new Error("Failed to cancel job"); + return res.json(); + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["jobs"] }); + queryClient.invalidateQueries({ queryKey: ["daemon-status"] }); + setCancelDialogOpen(false); + setSelectedJob(null); + }, + }); + + const handleCancelJob = () => { + if (selectedJob) { + cancelJobMutation.mutate({ + project: selectedProject, + job: selectedJob.id, + }); + } + }; + + // Combine and filter jobs + const allJobs: Array = []; + if (jobs) { + allJobs.push(...jobs.pending.map((j) => ({ ...j, status: "pending" }))); + allJobs.push(...jobs.running.map((j) => ({ ...j, status: 
"running" }))); + allJobs.push(...jobs.finished.map((j) => ({ ...j, status: "finished" }))); + } + + const filteredJobs = + statusFilter === "all" + ? allJobs + : allJobs.filter((j) => j.status === statusFilter); + + const getStatusBadge = (status: string) => { + switch (status) { + case "pending": + return ( + + + Pending + + ); + case "running": + return ( + + + Running + + ); + case "finished": + return ( + + + Finished + + ); + default: + return {status}; + } + }; + + return ( +
+
+ + {/* Filters */} +
+ + + Project + + + {isProjectsLoading ? ( + + ) : ( + + )} + + + + + + Status Filter + + + + + +
+ + {/* Jobs Statistics */} + {selectedProject && jobs && ( +
+ + + Pending + + + +
{jobs.pending.length}
+
+
+ + + Running + + + +
{jobs.running.length}
+
+
+ + + Finished + + + +
{jobs.finished.length}
+
+
+
+ )} + + {/* Jobs Table */} + {selectedProject && ( + + + + Jobs for "{selectedProject}" + + + + {isJobsLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : filteredJobs.length > 0 ? ( + + + + Job ID + Spider + Status + Start Time + PID + Actions + + + + {filteredJobs.map((job) => ( + + + + {job.id.substring(0, 8)}... + + + +
+ + {job.spider} +
+
+ {getStatusBadge(job.status)} + + + {format(new Date(job.start_time), "PPp")} + + + + {job.pid ? ( + {job.pid} + ) : ( + - + )} + + +
+ {job.log_url && ( + + )} + {(job.status === "pending" || job.status === "running") && ( + + )} +
+
+
+ ))} +
+
+ ) : ( +
+ +

No jobs found

+

+ {statusFilter === "all" + ? "No jobs have been scheduled for this project" + : `No ${statusFilter} jobs found`} +

+
+ )} +
+
+ )} + + {!selectedProject && !isProjectsLoading && ( + + + +

Select a project

+

+ Choose a project to view its jobs +

+
+
+ )} + + {/* Cancel Job Dialog */} + + + + Cancel Job + + Are you sure you want to cancel this job? + {selectedJob && ( +
+

+ Spider: {selectedJob.spider} +

+

+ Job ID: {selectedJob.id} +

+
+ )} +
+
+ + + + +
+
+
+ ); +} diff --git a/app/(dashboard)/layout.tsx b/app/(dashboard)/layout.tsx new file mode 100644 index 0000000..003d7c0 --- /dev/null +++ b/app/(dashboard)/layout.tsx @@ -0,0 +1,19 @@ +import { Sidebar } from "@/components/sidebar"; +import { Providers } from "@/components/providers"; + +export default function DashboardLayout({ + children, +}: { + children: React.ReactNode; +}) { + return ( + +
+ +
+
{children}
+
+
+
+ ); +} diff --git a/app/(dashboard)/page.tsx b/app/(dashboard)/page.tsx new file mode 100644 index 0000000..9e5c93b --- /dev/null +++ b/app/(dashboard)/page.tsx @@ -0,0 +1,173 @@ +"use client"; + +import { useQuery } from "@tanstack/react-query"; +import { Header } from "@/components/header"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Activity, + FolderKanban, + PlayCircle, + Clock, + CheckCircle2, +} from "lucide-react"; +import { DaemonStatus } from "@/lib/types"; + +export default function DashboardPage() { + const { data: daemonStatus, isLoading: isDaemonLoading } = useQuery({ + queryKey: ["daemon-status"], + queryFn: async (): Promise => { + const res = await fetch("/ui/api/scrapyd/daemon"); + if (!res.ok) throw new Error("Failed to fetch daemon status"); + return res.json(); + }, + }); + + const { data: projects, isLoading: isProjectsLoading } = useQuery({ + queryKey: ["projects"], + queryFn: async () => { + const res = await fetch("/ui/api/scrapyd/projects"); + if (!res.ok) throw new Error("Failed to fetch projects"); + return res.json(); + }, + }); + + const stats = [ + { + title: "Running Jobs", + value: daemonStatus?.running ?? 0, + icon: PlayCircle, + color: "text-green-500", + bgColor: "bg-green-500/10", + }, + { + title: "Pending Jobs", + value: daemonStatus?.pending ?? 0, + icon: Clock, + color: "text-yellow-500", + bgColor: "bg-yellow-500/10", + }, + { + title: "Finished Jobs", + value: daemonStatus?.finished ?? 0, + icon: CheckCircle2, + color: "text-blue-500", + bgColor: "bg-blue-500/10", + }, + { + title: "Total Projects", + value: projects?.projects?.length ?? 0, + icon: FolderKanban, + color: "text-purple-500", + bgColor: "bg-purple-500/10", + }, + ]; + + return ( +
+
+ + {/* Stats Grid */} +
+ {stats.map((stat, index) => ( + + + + {stat.title} + +
+ +
+
+ + {isDaemonLoading || isProjectsLoading ? ( + + ) : ( +
{stat.value}
+ )} +
+
+ ))} +
+ + {/* System Status Card */} + + +
+ System Status + + + {isDaemonLoading ? ( + + ) : ( + daemonStatus?.status + )} + +
+
+ + {isDaemonLoading ? ( +
+ + +
+ ) : ( +
+
+ Node Name: + {daemonStatus?.node_name} +
+
+ Total Jobs: + + {(daemonStatus?.running ?? 0) + + (daemonStatus?.pending ?? 0) + + (daemonStatus?.finished ?? 0)} + +
+
+ )} +
+
+ + {/* Projects Overview */} + + + Projects Overview + + + {isProjectsLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : projects?.projects?.length > 0 ? ( +
+ {projects.projects.map((project: string) => ( +
+
+ + {project} +
+ Active +
+ ))} +
+ ) : ( +

+ No projects found. Upload a project to get started. +

+ )} +
+
+
+ ); +} diff --git a/app/(dashboard)/projects/page.tsx b/app/(dashboard)/projects/page.tsx new file mode 100644 index 0000000..c1af170 --- /dev/null +++ b/app/(dashboard)/projects/page.tsx @@ -0,0 +1,326 @@ +"use client"; + +import { useState } from "react"; +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { Header } from "@/components/header"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { + FolderKanban, + Upload, + Trash2, + Package, + AlertCircle, +} from "lucide-react"; +import { ListProjects, ListVersions } from "@/lib/types"; + +export default function ProjectsPage() { + const queryClient = useQueryClient(); + const [selectedProject, setSelectedProject] = useState(null); + const [uploadDialogOpen, setUploadDialogOpen] = useState(false); + const [deleteDialogOpen, setDeleteDialogOpen] = useState(false); + + // Fetch projects + const { data: projects, isLoading: isProjectsLoading } = useQuery({ + queryKey: ["projects"], + queryFn: async (): Promise => { + const res = await fetch("/ui/api/scrapyd/projects"); + if (!res.ok) throw new Error("Failed to fetch projects"); + return res.json(); + }, + }); + + // Fetch versions for selected project + const { data: versions } = useQuery({ + queryKey: ["versions", selectedProject], + queryFn: async (): Promise => { + const res = await fetch( + `/ui/api/scrapyd/versions?project=${selectedProject}` + ); + if (!res.ok) throw new Error("Failed to fetch versions"); + return res.json(); + }, + enabled: !!selectedProject, + }); + + // Delete project mutation + const deleteProjectMutation = useMutation({ + mutationFn: async (project: string) => { + const res = await fetch("/ui/api/scrapyd/projects", { + method: "DELETE", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify({ project }), + }); + if (!res.ok) throw new Error("Failed to delete project"); + return res.json(); + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["projects"] }); + setDeleteDialogOpen(false); + setSelectedProject(null); + }, + }); + + // Upload version mutation + const uploadVersionMutation = useMutation({ + mutationFn: async (formData: FormData) => { + const res = await fetch("/ui/api/scrapyd/versions", { + method: "POST", + body: formData, + }); + if (!res.ok) throw new Error("Failed to upload version"); + return res.json(); + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["projects"] }); + queryClient.invalidateQueries({ queryKey: ["versions"] }); + setUploadDialogOpen(false); + }, + }); + + const handleUpload = (e: React.FormEvent) => { + e.preventDefault(); + const formData = new FormData(e.currentTarget); + uploadVersionMutation.mutate(formData); + }; + + return ( +
+
+ + + + +
+ + Upload Project Version + + Upload a Python egg file for your Scrapy project + + +
+
+ + +
+
+ + +
+
+ + +
+
+ + + +
+
+ + } + /> + + {/* Projects List */} + + + All Projects + + + {isProjectsLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : projects?.projects && projects.projects.length > 0 ? ( + + + + Project Name + Versions + Actions + + + + {projects.projects.map((project) => ( + setSelectedProject(project)} + className="cursor-pointer" + > + +
+ + {project} +
+
+ + {selectedProject === project && versions ? ( + + {versions.versions.length} version(s) + + ) : ( + Click to load + )} + + + { + setDeleteDialogOpen(open); + if (open) setSelectedProject(project); + }} + > + + + + + + Delete Project + + Are you sure you want to delete "{project}"? This + action cannot be undone. + + + + + + + + + +
+ ))} +
+
+ ) : ( +
+ +

No projects found

+

+ Upload your first project to get started +

+ +
+ )} +
+
+ + {/* Project Versions */} + {selectedProject && versions && ( + + + + Versions for "{selectedProject}" + + + + {versions.versions.length > 0 ? ( +
+ {versions.versions.map((version) => ( +
+
+ + {version} +
+ + {version === versions.versions[versions.versions.length - 1] + ? "Latest" + : ""} + +
+ ))} +
+ ) : ( +

+ No versions found for this project +

+ )} +
+
+ )} +
+ ); +} diff --git a/app/(dashboard)/spiders/page.tsx b/app/(dashboard)/spiders/page.tsx new file mode 100644 index 0000000..b65ede8 --- /dev/null +++ b/app/(dashboard)/spiders/page.tsx @@ -0,0 +1,282 @@ +"use client"; + +import { useState } from "react"; +import { useQuery, useMutation, useQueryClient } from "@tanstack/react-query"; +import { Header } from "@/components/header"; +import { Button } from "@/components/ui/button"; +import { Card, CardContent, CardHeader, CardTitle } from "@/components/ui/card"; +import { Badge } from "@/components/ui/badge"; +import { Skeleton } from "@/components/ui/skeleton"; +import { + Dialog, + DialogContent, + DialogDescription, + DialogFooter, + DialogHeader, + DialogTitle, + DialogTrigger, +} from "@/components/ui/dialog"; +import { Input } from "@/components/ui/input"; +import { Label } from "@/components/ui/label"; +import { + Select, + SelectContent, + SelectItem, + SelectTrigger, + SelectValue, +} from "@/components/ui/select"; +import { + Table, + TableBody, + TableCell, + TableHead, + TableHeader, + TableRow, +} from "@/components/ui/table"; +import { Bug, PlayCircle, AlertCircle } from "lucide-react"; +import { ListProjects, ListSpiders, ScheduleJob } from "@/lib/types"; +import { Textarea } from "@/components/ui/textarea"; + +export default function SpidersPage() { + const queryClient = useQueryClient(); + const [selectedProject, setSelectedProject] = useState(""); + const [scheduleDialogOpen, setScheduleDialogOpen] = useState(false); + const [selectedSpider, setSelectedSpider] = useState(""); + + // Fetch projects + const { data: projects, isLoading: isProjectsLoading } = useQuery({ + queryKey: ["projects"], + queryFn: async (): Promise => { + const res = await fetch("/ui/api/scrapyd/projects"); + if (!res.ok) throw new Error("Failed to fetch projects"); + return res.json(); + }, + }); + + // Fetch spiders for selected project + const { data: spiders, isLoading: isSpidersLoading } = useQuery({ + queryKey: ["spiders", selectedProject], + queryFn: async (): Promise => { + const res = await fetch( + `/ui/api/scrapyd/spiders?project=${selectedProject}` + ); + if (!res.ok) throw new Error("Failed to fetch spiders"); + return res.json(); + }, + enabled: !!selectedProject, + }); + + // Schedule job mutation + const scheduleJobMutation = useMutation({ + mutationFn: async (data: { + project: string; + spider: string; + args?: Record; + }) => { + const res = await fetch("/ui/api/scrapyd/jobs", { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(data), + }); + if (!res.ok) throw new Error("Failed to schedule job"); + return res.json() as Promise; + }, + onSuccess: () => { + queryClient.invalidateQueries({ queryKey: ["jobs"] }); + setScheduleDialogOpen(false); + setSelectedSpider(""); + }, + }); + + const handleSchedule = (e: React.FormEvent) => { + e.preventDefault(); + const formData = new FormData(e.currentTarget); + const argsStr = formData.get("args") as string; + + let args: Record | undefined; + if (argsStr.trim()) { + try { + args = JSON.parse(argsStr); + } catch (error) { + alert("Invalid JSON format for arguments"); + return; + } + } + + scheduleJobMutation.mutate({ + project: selectedProject, + spider: selectedSpider, + args, + }); + }; + + return ( +
+
+ + {/* Project Selector */} + + + Select Project + + + {isProjectsLoading ? ( + + ) : ( + + )} + + + + {/* Spiders List */} + {selectedProject && ( + + + + Spiders in "{selectedProject}" + + + + {isSpidersLoading ? ( +
+ {[1, 2, 3].map((i) => ( + + ))} +
+ ) : spiders?.spiders && spiders.spiders.length > 0 ? ( + + + + Spider Name + Status + Actions + + + + {spiders.spiders.map((spider) => ( + + +
+ + {spider} +
+
+ + Available + + + { + setScheduleDialogOpen(open); + if (open) setSelectedSpider(spider); + }} + > + + + + +
+ + Schedule Spider Job + + Schedule "{spider}" to run on "{selectedProject}" + + +
+
+ + +
+
+ + +
+
+ +