Files
docker-compose/ai/litellm-config.yaml

50 lines
1.3 KiB
YAML

---
# LiteLLM proxy configuration: exposes Anthropic Claude models behind
# stable alias names, with prompt-cache parameters stripped globally.
model_list:
  - model_name: claude-sonnet-4
    litellm_params:
      model: anthropic/claude-sonnet-4-20250514
      api_key: os.environ/ANTHROPIC_API_KEY
  - model_name: claude-sonnet-4.5
    litellm_params:
      model: anthropic/claude-sonnet-4-5-20250929
      api_key: os.environ/ANTHROPIC_API_KEY
      # Per-model override: also drop unsupported params for this deployment.
      drop_params: true
      additional_drop_params: ["prompt_cache_key"]
  - model_name: claude-3-5-sonnet
    litellm_params:
      model: anthropic/claude-3-5-sonnet-20241022
      api_key: os.environ/ANTHROPIC_API_KEY
  - model_name: claude-3-opus
    litellm_params:
      model: anthropic/claude-3-opus-20240229
      api_key: os.environ/ANTHROPIC_API_KEY
  - model_name: claude-3-haiku
    litellm_params:
      model: anthropic/claude-3-haiku-20240307
      api_key: os.environ/ANTHROPIC_API_KEY

litellm_settings:
  # Drop parameters the target provider does not support
  drop_params: true
  set_verbose: true
  # Disable prompt caching features
  cache: false
  # Do not tolerate any failed calls before surfacing the error
  allowed_fails: 0
  # Modify params before sending to provider
  modify_params: true
  # Drop prompt_cache_key globally for all models
  additional_drop_params: ["prompt_cache_key"]

router_settings:
  allowed_fails: 0
  # Drop unsupported parameters on every routed request
  default_litellm_params:
    drop_params: true

general_settings:
  disable_responses_id_security: true