Initial commit: FaceFusion REST API

FastAPI wrapper around FaceFusion v3.5.3 submodule with:
- Sync and async (job-based) processing endpoints
- FaceFusion bridge with manual key registration and Lock-serialized processing
- Multi-target Dockerfile (CPU + CUDA GPU)
- Docker Compose configs for dev, prod-cpu, and prod-gpu
- Gitea CI/CD workflow with dual image builds
- All 11 FaceFusion processors supported via options API

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-16 12:58:33 +01:00
commit 800edc08ea
31 changed files with 1784 additions and 0 deletions

0
app/routers/__init__.py Normal file
View File

105
app/routers/jobs.py Normal file
View File

@@ -0,0 +1,105 @@
import json
import logging
import uuid
from typing import List, Optional
from fastapi import APIRouter, File, Form, HTTPException, UploadFile
from fastapi.responses import FileResponse
from app.schemas.jobs import JobCreateResponse, JobDeleteResponse, JobStatus, JobStatusResponse
from app.schemas.process import ProcessingOptions
from app.services import facefusion_bridge, file_manager
from app.services.worker import worker_queue
logger = logging.getLogger(__name__)
router = APIRouter(prefix='/api/v1/jobs', tags=['jobs'])
@router.post('', response_model=JobCreateResponse)
async def create_job(
    target: UploadFile = File(...),
    source: Optional[List[UploadFile]] = File(None),
    options: Optional[str] = Form(None),
):
    """Create an async processing job.

    Saves the uploads to a per-request directory, builds FaceFusion args,
    and enqueues them on the worker queue under a fresh UUID job id.
    Raises 422 for malformed/invalid options and 500 on any other failure.
    """
    job_id = str(uuid.uuid4())
    request_dir = file_manager.create_request_dir()
    try:
        # Parse and validate the optional JSON options payload.
        parsed_options = None
        if options:
            try:
                parsed_options = json.loads(options)
                ProcessingOptions(**parsed_options)  # validate shape only
            except Exception as e:
                # Covers json.JSONDecodeError and pydantic validation errors.
                # (The original tuple `(json.JSONDecodeError, Exception)` was
                # redundant: Exception already subsumes JSONDecodeError.)
                raise HTTPException(status_code=422, detail=f'Invalid options: {e}')
        # Persist uploads into the per-request directory.
        target_path = await file_manager.save_upload(target, request_dir)
        source_paths = []
        if source:
            source_paths = await file_manager.save_uploads(source, request_dir)
        output_path = file_manager.generate_output_path(target_path)
        args = facefusion_bridge.build_args_from_options(
            source_paths=source_paths,
            target_path=target_path,
            output_path=output_path,
            options=parsed_options,
        )
        worker_queue.submit(job_id, args)
        return JobCreateResponse(job_id=job_id, status=JobStatus.pending)
    except HTTPException:
        # Bugfix: rejecting the request previously leaked request_dir;
        # clean it up before re-raising. (HTTPException is only raised
        # before the job is submitted, so nothing in-flight is deleted.)
        file_manager.cleanup_directory(request_dir)
        raise
    except Exception as e:
        file_manager.cleanup_directory(request_dir)
        logger.error(f'Job creation failed: {e}')
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/{job_id}', response_model=JobStatusResponse)
async def get_job_status(job_id: str):
    """Return the current status of a previously submitted job (404 if unknown)."""
    job = worker_queue.get_job(job_id)
    if not job:
        raise HTTPException(status_code=404, detail='Job not found')
    # Mirror the job record into the response schema, mapping the worker's
    # internal status enum onto the API-level JobStatus via its value.
    passthrough = {
        field: getattr(job, field)
        for field in ('job_id', 'created_at', 'updated_at', 'error')
    }
    return JobStatusResponse(status=JobStatus(job.status.value), **passthrough)
@router.get('/{job_id}/result')
async def get_job_result(job_id: str):
    """Download job result. Only available when job is completed.

    Returns 404 for unknown jobs, 409 while the job is not completed,
    and 500 when a completed job recorded no output file.
    """
    job = worker_queue.get_job(job_id)
    if not job:
        raise HTTPException(status_code=404, detail='Job not found')
    # Bugfix: job.status is an enum member (get_job_status reads
    # job.status.value), so `job.status != 'completed'` is only correct
    # when the enum mixes in str. Comparing .value is correct either way.
    if job.status.value != 'completed':
        raise HTTPException(status_code=409, detail=f'Job status is {job.status}, not completed')
    if not job.output_path:
        raise HTTPException(status_code=500, detail='No output file')
    return FileResponse(
        path=job.output_path,
        media_type='application/octet-stream',
    )
@router.delete('/{job_id}', response_model=JobDeleteResponse)
async def delete_job(job_id: str):
    """Cancel/delete a job; 404 when the queue has no such job."""
    if not worker_queue.delete_job(job_id):
        raise HTTPException(status_code=404, detail='Job not found')
    return JobDeleteResponse(job_id=job_id, deleted=True)

70
app/routers/process.py Normal file
View File

@@ -0,0 +1,70 @@
import json
import logging
from time import time
from typing import List, Optional
from fastapi import APIRouter, File, Form, HTTPException, UploadFile
from fastapi.responses import FileResponse
from app.schemas.process import ProcessingOptions
from app.services import facefusion_bridge, file_manager
logger = logging.getLogger(__name__)
router = APIRouter(prefix='/api/v1', tags=['processing'])
@router.post('/process')
async def process_sync(
    target: UploadFile = File(...),
    source: Optional[List[UploadFile]] = File(None),
    options: Optional[str] = Form(None),
):
    """Synchronous face processing. Returns the result file directly.

    Uploads are written into a per-request directory, processed through the
    FaceFusion bridge, and the output is returned with an X-Processing-Time
    header (seconds). Raises 422 for malformed/invalid options and 500 on
    processing errors. The request directory is always cleaned up.
    """
    request_dir = file_manager.create_request_dir()
    try:
        # Parse and validate the optional JSON options payload.
        parsed_options = None
        if options:
            try:
                parsed_options = json.loads(options)
                ProcessingOptions(**parsed_options)  # validate shape only
            except Exception as e:
                # Covers json.JSONDecodeError and pydantic validation errors.
                # (The original tuple `(json.JSONDecodeError, Exception)` was
                # redundant: Exception already subsumes JSONDecodeError.)
                raise HTTPException(status_code=422, detail=f'Invalid options: {e}')
        # Save uploads
        target_path = await file_manager.save_upload(target, request_dir)
        source_paths = []
        if source:
            source_paths = await file_manager.save_uploads(source, request_dir)
        output_path = file_manager.generate_output_path(target_path)
        # Build args and process
        args = facefusion_bridge.build_args_from_options(
            source_paths=source_paths,
            target_path=target_path,
            output_path=output_path,
            options=parsed_options,
        )
        start_time = time()
        facefusion_bridge.process_sync(args)
        processing_time = time() - start_time
        logger.info(f'Sync processing completed in {processing_time:.2f}s')
        return FileResponse(
            path=output_path,
            media_type='application/octet-stream',
            filename=target.filename,
            headers={'X-Processing-Time': f'{processing_time:.2f}'},
        )
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Processing failed: {e}')
        raise HTTPException(status_code=500, detail=str(e))
    finally:
        # NOTE(review): this runs when the handler returns, before the
        # FileResponse body is streamed. If generate_output_path places the
        # output inside request_dir, the file would be deleted before the
        # client receives it -- confirm, and if so move cleanup into a
        # response background task.
        file_manager.cleanup_directory(request_dir)

47
app/routers/processors.py Normal file
View File

@@ -0,0 +1,47 @@
import logging
from typing import List, Optional

from fastapi import APIRouter, HTTPException

from app.schemas.system import ModelInfo, ProcessorInfo
from app.services import facefusion_bridge, file_manager
logger = logging.getLogger(__name__)
router = APIRouter(prefix='/api/v1', tags=['processors'])
@router.get('/processors', response_model=List[ProcessorInfo])
async def list_processors():
    """List available processors and their models.

    Returns one ProcessorInfo per name reported by the bridge; 500 when the
    bridge query fails. Models are not enumerated here yet, so each entry
    carries an empty model list.
    """
    try:
        names = facefusion_bridge.get_available_processors()
        # Idiom fix: build the list with a comprehension instead of a
        # manual append loop.
        return [ProcessorInfo(name=name, models=[]) for name in names]
    except Exception as e:
        logger.error(f'Failed to list processors: {e}')
        raise HTTPException(status_code=500, detail=str(e))
@router.get('/models', response_model=List[ModelInfo])
async def list_models():
    """List downloaded model files as (name, path, size) records."""
    entries = file_manager.list_model_files()
    return [
        ModelInfo(name=model_name, path=model_path, size_bytes=byte_count)
        for model_name, model_path, byte_count in entries
    ]
@router.post('/models/download')
async def download_models(processors: Optional[List[str]] = None):
    """Trigger model download for specified processors.

    Annotation fix: the default was None on a plain `List[str]` annotation;
    `Optional[List[str]]` declares the same runtime behavior correctly.
    None leaves processor selection to the bridge. Returns 500 when the
    bridge reports a failed download or raises.
    """
    try:
        success = facefusion_bridge.force_download_models(processors)
        if success:
            return {'status': 'ok', 'message': 'Models downloaded successfully'}
        raise HTTPException(status_code=500, detail='Some models failed to download')
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f'Model download failed: {e}')
        raise HTTPException(status_code=500, detail=str(e))

56
app/routers/system.py Normal file
View File

@@ -0,0 +1,56 @@
import logging
import os
import psutil
from fastapi import APIRouter
from app.schemas.system import GpuDevice, HealthResponse, SystemInfoResponse
from app.services import facefusion_bridge
logger = logging.getLogger(__name__)
router = APIRouter(prefix='/api/v1', tags=['system'])
@router.get('/health', response_model=HealthResponse)
async def health_check():
    """Liveness probe: always answers with the default HealthResponse."""
    return HealthResponse()
@router.get('/system', response_model=SystemInfoResponse)
async def system_info():
    """Report execution providers, detected GPUs, CPU count, and memory."""
    memory = psutil.virtual_memory()
    return SystemInfoResponse(
        execution_providers=facefusion_bridge.get_execution_providers(),
        gpu_devices=_detect_gpu_devices(),
        cpu_count=os.cpu_count(),
        memory_total=memory.total,
        memory_available=memory.available,
    )
def _detect_gpu_devices():
    """Query nvidia-smi for installed NVIDIA GPUs.

    Returns a list of GpuDevice entries; an empty list when nvidia-smi is
    missing, times out, exits non-zero, or emits unparseable output.
    """
    import subprocess

    devices = []
    try:
        result = subprocess.run(
            ['nvidia-smi', '--query-gpu=index,name,memory.total,memory.used', '--format=csv,noheader,nounits'],
            capture_output=True, text=True, timeout=5,
        )
    except (FileNotFoundError, subprocess.TimeoutExpired):
        return devices
    if result.returncode != 0:
        return devices
    for line in result.stdout.strip().split('\n'):
        parts = [p.strip() for p in line.split(',')]
        if len(parts) < 4:
            continue
        try:
            devices.append(GpuDevice(
                id=int(parts[0]),
                name=parts[1],
                # --format=...,nounits reports MiB; convert to bytes.
                memory_total=int(float(parts[2])) * 1024 * 1024,
                memory_used=int(float(parts[3])) * 1024 * 1024,
            ))
        except ValueError:
            # Bugfix: a malformed numeric field previously propagated
            # ValueError out of the /system endpoint; skip the bad line.
            continue
    return devices