From c55f41408a586334354cb4e2e3a668f708ed0598 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?=
Date: Sun, 30 Nov 2025 23:03:32 +0100
Subject: [PATCH] fix(ai): litellm config

---
 ai/litellm-config.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/ai/litellm-config.yaml b/ai/litellm-config.yaml
index b3a8c51..325e2b0 100644
--- a/ai/litellm-config.yaml
+++ b/ai/litellm-config.yaml
@@ -41,7 +41,6 @@ model_list:
       stream_timeout: 600
       supports_system_messages: true # Llama supports system messages
       stream: true # Enable streaming by default
-      max_tokens: 4096 # Cap completion tokens to leave room for input
 
   # Embeddings - BGE Large (Port 8002)
   - model_name: bge-large-en
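
Note: after this patch, the touched region of ai/litellm-config.yaml should read roughly as sketched below. The litellm_params nesting and the name of the chat model entry are assumptions for illustration; only the indented keys and the bge-large-en entry actually appear in the hunk above.

model_list:
  - model_name: llama-3              # hypothetical entry name; the hunk only shows its params
    litellm_params:                  # assumed nesting for the keys shown in the hunk
      stream_timeout: 600
      supports_system_messages: true # Llama supports system messages
      stream: true # Enable streaming by default
      # max_tokens removed by this patch; completion length now falls back to
      # whatever the caller requests or the backing server's default

  # Embeddings - BGE Large (Port 8002)
  - model_name: bge-large-en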