From 94ab4ae6dd066e5c39a38126bdabd51a25648f08 Mon Sep 17 00:00:00 2001
From: Sebastian Krüger
Date: Sun, 23 Nov 2025 14:36:34 +0100
Subject: [PATCH] feat: enable system message support for qwen-2.5-7b

---
 ai/litellm-config.yaml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ai/litellm-config.yaml b/ai/litellm-config.yaml
index 15e0c02..4a61bf0 100644
--- a/ai/litellm-config.yaml
+++ b/ai/litellm-config.yaml
@@ -39,7 +39,7 @@ model_list:
     tpm: 100000
     timeout: 600 # 10 minutes for generation
     stream_timeout: 600
-    supports_system_messages: false # vLLM handles system messages differently
+    supports_system_messages: true # Qwen supports system messages
     stream: true # Enable streaming by default
   - model_name: llama-3.1-8b
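
For reviewers, a minimal sketch of how the change can be verified end to end: the LiteLLM proxy exposes an OpenAI-compatible API, so a request containing a system message can be sent with the standard OpenAI Python SDK. The base_url, api_key, and proxy port below are placeholders (assumptions about the local deployment); the model name "qwen-2.5-7b" is the entry touched by this patch.

    from openai import OpenAI

    # Send a chat request with a system message through the LiteLLM proxy.
    # base_url and api_key are assumed values for a local deployment; adjust as needed.
    client = OpenAI(
        base_url="http://localhost:4000/v1",  # assumed LiteLLM proxy address
        api_key="sk-placeholder",             # assumed proxy key
    )

    response = client.chat.completions.create(
        model="qwen-2.5-7b",  # model_name as configured in ai/litellm-config.yaml
        messages=[
            {"role": "system", "content": "You are a terse assistant. Answer in one sentence."},
            {"role": "user", "content": "Summarize what this config change enables."},
        ],
        stream=False,
    )
    print(response.choices[0].message.content)

If the system prompt is being honored, the reply should follow the one-sentence constraint; before this patch, with supports_system_messages set to false, LiteLLM would not pass the system role through unchanged for this model entry.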