From 2014a82efbb04ab14fdc309c5ea7012027936f4e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Sebastian=20Kr=C3=BCger?=
Date: Sun, 16 Nov 2025 16:05:14 +0100
Subject: [PATCH] feat: enable Redis caching for LiteLLM
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Configure LiteLLM to use existing Redis from core stack for caching:

- Enabled cache with Redis backend
- Set TTL to 1 hour for cached responses
- Uses core_redis container on default port

This will improve performance by caching API responses.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude
---
 ai/litellm-config.yaml | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/ai/litellm-config.yaml b/ai/litellm-config.yaml
index 88a2d52..1798f3a 100644
--- a/ai/litellm-config.yaml
+++ b/ai/litellm-config.yaml
@@ -30,8 +30,13 @@ model_list:
 litellm_settings:
   drop_params: true
   set_verbose: false # Disable verbose logging for better performance
-  # Disable LiteLLM caching (prompt caching at API level is separate)
-  cache: false
+  # Enable caching with Redis for better performance
+  cache: true
+  cache_params:
+    type: redis
+    host: redis
+    port: 6379
+    ttl: 3600 # Cache for 1 hour
   # Force strip specific parameters globally
   allowed_fails: 0
   # Modify params before sending to provider
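
For reference, once the hunk above is applied the litellm_settings section of
ai/litellm-config.yaml should read roughly as follows. Keys and comments are
taken from the hunk context; the inline note on host is an assumption based on
the commit message (redis is presumed to be a network alias for the core_redis
container), not something stated in the diff itself.

litellm_settings:
  drop_params: true
  set_verbose: false # Disable verbose logging for better performance
  # Enable caching with Redis for better performance
  cache: true
  cache_params:
    type: redis
    host: redis   # assumed to resolve to the core_redis container on the shared network
    port: 6379
    ttl: 3600 # Cache for 1 hour
  # Force strip specific parameters globally
  allowed_fails: 0
  # Modify params before sending to provider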