feat: remove gpu support and simplify to cpu-only
All checks were successful
Build and Push Docker Image / build (push) Successful in 1m13s
This commit is contained in:
@@ -5,7 +5,7 @@ import psutil
|
||||
|
||||
from fastapi import APIRouter
|
||||
|
||||
from app.schemas.system import GpuDevice, HealthResponse, SystemInfoResponse
|
||||
from app.schemas.system import HealthResponse, SystemInfoResponse
|
||||
from app.services import facefusion_bridge
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -21,36 +21,11 @@ async def health_check():
|
||||
@router.get('/system', response_model=SystemInfoResponse)
async def system_info():
    """Return a snapshot of host resources and inference execution options.

    Collects the available execution providers from the facefusion bridge,
    any NVIDIA GPUs visible via nvidia-smi, the CPU count, and virtual
    memory totals, and packages them as a SystemInfoResponse.
    """
    memory = psutil.virtual_memory()
    detected_gpus = _detect_gpu_devices()
    available_providers = facefusion_bridge.get_execution_providers()

    return SystemInfoResponse(
        execution_providers=available_providers,
        gpu_devices=detected_gpus,
        cpu_count=os.cpu_count(),
        memory_total=memory.total,
        memory_available=memory.available,
    )
|
||||
|
||||
|
||||
def _detect_gpu_devices():
|
||||
devices = []
|
||||
try:
|
||||
import subprocess
|
||||
result = subprocess.run(
|
||||
['nvidia-smi', '--query-gpu=index,name,memory.total,memory.used', '--format=csv,noheader,nounits'],
|
||||
capture_output=True, text=True, timeout=5,
|
||||
)
|
||||
if result.returncode == 0:
|
||||
for line in result.stdout.strip().split('\n'):
|
||||
parts = [p.strip() for p in line.split(',')]
|
||||
if len(parts) >= 4:
|
||||
devices.append(GpuDevice(
|
||||
id=int(parts[0]),
|
||||
name=parts[1],
|
||||
memory_total=int(float(parts[2])) * 1024 * 1024,
|
||||
memory_used=int(float(parts[3])) * 1024 * 1024,
|
||||
))
|
||||
except (FileNotFoundError, subprocess.TimeoutExpired):
|
||||
pass
|
||||
return devices
|
||||
|
||||
@@ -104,7 +104,6 @@ class ProcessingOptions(BaseModel):
|
||||
face_detector: Optional[FaceDetectorOptions] = None
|
||||
face_selector: Optional[FaceSelectorOptions] = None
|
||||
output: Optional[OutputOptions] = None
|
||||
execution_providers: Optional[List[str]] = None
|
||||
execution_thread_count: Optional[int] = None
|
||||
video_memory_strategy: Optional[str] = None
|
||||
|
||||
|
||||
@@ -7,16 +7,8 @@ class HealthResponse(BaseModel):
|
||||
status: str = 'ok'
|
||||
|
||||
|
||||
class GpuDevice(BaseModel):
    """A single GPU as reported by ``nvidia-smi`` (see ``_detect_gpu_devices``)."""

    # nvidia-smi device index
    id: int
    # GPU product name string
    name: str
    # Total device memory in bytes; None when not reported
    memory_total: Optional[int] = None
    # Used device memory in bytes; None when not reported
    memory_used: Optional[int] = None
|
||||
|
||||
|
||||
class SystemInfoResponse(BaseModel):
    """Response schema for the ``/system`` endpoint: host and inference info."""

    # Execution providers reported by the facefusion bridge
    execution_providers: List[str]
    # GPUs detected via nvidia-smi; empty when none are present
    gpu_devices: List[GpuDevice]
    # os.cpu_count() result; None when it cannot be determined
    cpu_count: Optional[int] = None
    # Total virtual memory in bytes (psutil.virtual_memory().total)
    memory_total: Optional[int] = None
    # Available virtual memory in bytes (psutil.virtual_memory().available)
    memory_available: Optional[int] = None
|
||||
|
||||
@@ -339,8 +339,6 @@ def build_args_from_options(
|
||||
args[f'output_{key}'] = out[key]
|
||||
|
||||
# Execution overrides
|
||||
if 'execution_providers' in options:
|
||||
args['execution_providers'] = options['execution_providers']
|
||||
if 'execution_thread_count' in options:
|
||||
args['execution_thread_count'] = options['execution_thread_count']
|
||||
if 'video_memory_strategy' in options:
|
||||
|
||||
Reference in New Issue
Block a user