feat: initial commit - Scrapyd UI web interface
- Next.js 16.0.1 + React 19.2.0 + Tailwind CSS 4.1.16 - Complete Scrapyd API integration (all 12 endpoints) - Dashboard with real-time job monitoring - Projects management (upload, list, delete) - Spiders management with scheduling - Jobs monitoring with filtering and cancellation - System status monitoring - Dark/light theme toggle with next-themes - Server-side authentication via environment variables - Docker deployment with multi-stage builds - GitHub Actions CI/CD workflow 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
216
lib/scrapyd-client.ts
Normal file
216
lib/scrapyd-client.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import {
|
||||
DaemonStatusSchema,
|
||||
ListProjectsSchema,
|
||||
ListVersionsSchema,
|
||||
ListSpidersSchema,
|
||||
ListJobsSchema,
|
||||
ScheduleJobSchema,
|
||||
CancelJobSchema,
|
||||
DeleteVersionSchema,
|
||||
DeleteProjectSchema,
|
||||
AddVersionSchema,
|
||||
type ScheduleJobParams,
|
||||
type CancelJobParams,
|
||||
type ListVersionsParams,
|
||||
type ListSpidersParams,
|
||||
type ListJobsParams,
|
||||
type DeleteVersionParams,
|
||||
type DeleteProjectParams,
|
||||
} from "./types";
|
||||
|
||||
// Get credentials from environment variables (server-side only)
|
||||
const SCRAPYD_URL = process.env.SCRAPYD_URL || "https://scrapy.pivoine.art";
|
||||
const SCRAPYD_USERNAME = process.env.SCRAPYD_USERNAME || "";
|
||||
const SCRAPYD_PASSWORD = process.env.SCRAPYD_PASSWORD || "";
|
||||
|
||||
/**
|
||||
* Create Basic Auth header
|
||||
*/
|
||||
function getAuthHeader(): string {
|
||||
const credentials = Buffer.from(`${SCRAPYD_USERNAME}:${SCRAPYD_PASSWORD}`).toString("base64");
|
||||
return `Basic ${credentials}`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Base fetch wrapper with auth
|
||||
*/
|
||||
async function fetchScrapyd(endpoint: string, options: RequestInit = {}) {
|
||||
const url = `${SCRAPYD_URL}/${endpoint}`;
|
||||
|
||||
const response = await fetch(url, {
|
||||
...options,
|
||||
headers: {
|
||||
Authorization: getAuthHeader(),
|
||||
...options.headers,
|
||||
},
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`Scrapyd API error: ${response.status} ${response.statusText}`);
|
||||
}
|
||||
|
||||
return response.json();
|
||||
}
|
||||
|
||||
/**
|
||||
* ScrapydClient - Server-side API client for Scrapyd
|
||||
* All methods use environment variables for authentication
|
||||
*/
|
||||
export const ScrapydClient = {
|
||||
/**
|
||||
* Get daemon status
|
||||
*/
|
||||
async getDaemonStatus() {
|
||||
const data = await fetchScrapyd("daemonstatus.json");
|
||||
return DaemonStatusSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* List all projects
|
||||
*/
|
||||
async listProjects() {
|
||||
const data = await fetchScrapyd("listprojects.json");
|
||||
return ListProjectsSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* List versions for a project
|
||||
*/
|
||||
async listVersions(params: ListVersionsParams) {
|
||||
const url = new URLSearchParams({ project: params.project });
|
||||
const data = await fetchScrapyd(`listversions.json?${url}`);
|
||||
return ListVersionsSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* List spiders for a project
|
||||
*/
|
||||
async listSpiders(params: ListSpidersParams) {
|
||||
const url = new URLSearchParams({
|
||||
project: params.project,
|
||||
...(params.version && { _version: params.version }),
|
||||
});
|
||||
const data = await fetchScrapyd(`listspiders.json?${url}`);
|
||||
return ListSpidersSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* List jobs (pending, running, finished) for a project
|
||||
*/
|
||||
async listJobs(params: ListJobsParams) {
|
||||
const url = new URLSearchParams({ project: params.project });
|
||||
const data = await fetchScrapyd(`listjobs.json?${url}`);
|
||||
return ListJobsSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* Schedule a spider job
|
||||
*/
|
||||
async scheduleJob(params: ScheduleJobParams) {
|
||||
const formData = new URLSearchParams({
|
||||
project: params.project,
|
||||
spider: params.spider,
|
||||
...(params.jobid && { jobid: params.jobid }),
|
||||
});
|
||||
|
||||
// Add custom settings
|
||||
if (params.settings) {
|
||||
Object.entries(params.settings).forEach(([key, value]) => {
|
||||
formData.append(`setting`, `${key}=${value}`);
|
||||
});
|
||||
}
|
||||
|
||||
// Add spider arguments
|
||||
if (params.args) {
|
||||
Object.entries(params.args).forEach(([key, value]) => {
|
||||
formData.append(key, value);
|
||||
});
|
||||
}
|
||||
|
||||
const data = await fetchScrapyd("schedule.json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
body: formData.toString(),
|
||||
});
|
||||
|
||||
return ScheduleJobSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* Cancel a job
|
||||
*/
|
||||
async cancelJob(params: CancelJobParams) {
|
||||
const formData = new URLSearchParams({
|
||||
project: params.project,
|
||||
job: params.job,
|
||||
});
|
||||
|
||||
const data = await fetchScrapyd("cancel.json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
body: formData.toString(),
|
||||
});
|
||||
|
||||
return CancelJobSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a project version
|
||||
*/
|
||||
async deleteVersion(params: DeleteVersionParams) {
|
||||
const formData = new URLSearchParams({
|
||||
project: params.project,
|
||||
version: params.version,
|
||||
});
|
||||
|
||||
const data = await fetchScrapyd("delversion.json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
body: formData.toString(),
|
||||
});
|
||||
|
||||
return DeleteVersionSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a project
|
||||
*/
|
||||
async deleteProject(params: DeleteProjectParams) {
|
||||
const formData = new URLSearchParams({
|
||||
project: params.project,
|
||||
});
|
||||
|
||||
const data = await fetchScrapyd("delproject.json", {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/x-www-form-urlencoded",
|
||||
},
|
||||
body: formData.toString(),
|
||||
});
|
||||
|
||||
return DeleteProjectSchema.parse(data);
|
||||
},
|
||||
|
||||
/**
|
||||
* Add/upload a project version (egg file)
|
||||
*/
|
||||
async addVersion(project: string, version: string, eggFile: Buffer) {
|
||||
const formData = new FormData();
|
||||
formData.append("project", project);
|
||||
formData.append("version", version);
|
||||
formData.append("egg", new Blob([new Uint8Array(eggFile)]), "project.egg");
|
||||
|
||||
const data = await fetchScrapyd("addversion.json", {
|
||||
method: "POST",
|
||||
body: formData,
|
||||
});
|
||||
|
||||
return AddVersionSchema.parse(data);
|
||||
},
|
||||
};
|
||||
120
lib/types.ts
Normal file
120
lib/types.ts
Normal file
@@ -0,0 +1,120 @@
|
||||
import { z } from "zod";
|
||||
|
||||
// Scrapyd API Response Schemas
|
||||
export const DaemonStatusSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
node_name: z.string(),
|
||||
pending: z.number(),
|
||||
running: z.number(),
|
||||
finished: z.number(),
|
||||
});
|
||||
|
||||
export const ListProjectsSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
projects: z.array(z.string()),
|
||||
});
|
||||
|
||||
export const ListVersionsSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
versions: z.array(z.string()),
|
||||
});
|
||||
|
||||
export const ListSpidersSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
spiders: z.array(z.string()),
|
||||
});
|
||||
|
||||
export const JobSchema = z.object({
|
||||
id: z.string(),
|
||||
spider: z.string(),
|
||||
pid: z.number().optional(),
|
||||
start_time: z.string(),
|
||||
end_time: z.string().optional(),
|
||||
log_url: z.string().optional(),
|
||||
items_url: z.string().optional(),
|
||||
});
|
||||
|
||||
export const ListJobsSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
pending: z.array(JobSchema),
|
||||
running: z.array(JobSchema),
|
||||
finished: z.array(JobSchema),
|
||||
});
|
||||
|
||||
export const ScheduleJobSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
jobid: z.string(),
|
||||
});
|
||||
|
||||
export const CancelJobSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
prevstate: z.enum(["pending", "running", "finished"]),
|
||||
});
|
||||
|
||||
export const DeleteVersionSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
});
|
||||
|
||||
export const DeleteProjectSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
});
|
||||
|
||||
export const AddVersionSchema = z.object({
|
||||
status: z.literal("ok"),
|
||||
spiders: z.number(),
|
||||
});
|
||||
|
||||
// TypeScript Types
// Inferred from the zod schemas above, so the compile-time types can never
// drift from the runtime validators.
export type DaemonStatus = z.infer<typeof DaemonStatusSchema>;
export type ListProjects = z.infer<typeof ListProjectsSchema>;
export type ListVersions = z.infer<typeof ListVersionsSchema>;
export type ListSpiders = z.infer<typeof ListSpidersSchema>;
export type Job = z.infer<typeof JobSchema>;
export type ListJobs = z.infer<typeof ListJobsSchema>;
export type ScheduleJob = z.infer<typeof ScheduleJobSchema>;
export type CancelJob = z.infer<typeof CancelJobSchema>;
export type DeleteVersion = z.infer<typeof DeleteVersionSchema>;
export type DeleteProject = z.infer<typeof DeleteProjectSchema>;
export type AddVersion = z.infer<typeof AddVersionSchema>;
|
||||
|
||||
// Request Parameters

/** Parameters accepted by ScrapydClient.scheduleJob. */
export interface ScheduleJobParams {
  project: string;
  spider: string;
  /** Optional explicit job id; omitted from the request when falsy. */
  jobid?: string;
  /** Scrapy settings, each sent as a repeated "setting=key=value" field. */
  settings?: Record<string, string>;
  /** Extra form fields passed through to the spider as arguments. */
  args?: Record<string, string>;
}

/** Parameters accepted by ScrapydClient.cancelJob. */
export interface CancelJobParams {
  project: string;
  job: string;
}

/** Parameters accepted by ScrapydClient.listVersions. */
export interface ListVersionsParams {
  project: string;
}

/** Parameters accepted by ScrapydClient.listSpiders. */
export interface ListSpidersParams {
  project: string;
  /** When set, sent to Scrapyd as the "_version" query parameter. */
  version?: string;
}

/** Parameters accepted by ScrapydClient.listJobs. */
export interface ListJobsParams {
  project: string;
}

/** Parameters accepted by ScrapydClient.deleteVersion. */
export interface DeleteVersionParams {
  project: string;
  version: string;
}

/** Parameters accepted by ScrapydClient.deleteProject. */
export interface DeleteProjectParams {
  project: string;
}

/**
 * Parameters for a project-version upload.
 * NOTE(review): declares egg as a browser File, but ScrapydClient.addVersion
 * takes (project, version, Buffer) positionally and never uses this
 * interface — confirm the intended consumer (likely a route handler).
 */
export interface AddVersionParams {
  project: string;
  version: string;
  egg: File;
}
|
||||
6
lib/utils.ts
Normal file
6
lib/utils.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { clsx, type ClassValue } from "clsx"
|
||||
import { twMerge } from "tailwind-merge"
|
||||
|
||||
export function cn(...inputs: ClassValue[]) {
|
||||
return twMerge(clsx(inputs))
|
||||
}
|
||||
Reference in New Issue
Block a user