feat(ai): bge over litellm

This commit is contained in:
2025-11-30 20:12:07 +01:00
parent 35e0f232f9
commit 120bf7c385
2 changed files with 10 additions and 0 deletions

View File

@@ -105,6 +105,7 @@ services:
LITELLM_MASTER_KEY: ${AI_LITELLM_API_KEY}
DATABASE_URL: postgresql://${AI_DB_USER}:${AI_DB_PASSWORD}@ai_postgres:5432/litellm
GPU_VLLM_LLAMA_URL: ${GPU_VLLM_LLAMA_URL}
GPU_VLLM_BGE_URL: ${GPU_VLLM_BGE_URL}
# LITELLM_DROP_PARAMS: 'true' # DISABLED: Was breaking streaming
NO_DOCS: "true"
NO_REDOC: "true"

View File

@@ -42,6 +42,15 @@ model_list:
supports_system_messages: true # Llama supports system messages
stream: true # Enable streaming by default
# Embeddings - BGE Large (Port 8002)
- model_name: bge-large-en
litellm_params:
model: openai/BAAI/bge-large-en-v1.5
api_base: os.environ/GPU_VLLM_BGE_URL
api_key: "EMPTY"
rpm: 1000
tpm: 500000
litellm_settings:
drop_params: false # DISABLED: Was breaking streaming
set_verbose: true # Enable verbose logging for debugging streaming issues