---
# litellm-proxy / config.yaml
# Deploy LiteLLM proxy (author: parson, commit 3267348, verified)
model_list:
  # Friendly aliases (recommended)
  - model_name: gpt-4o-mini
    litellm_params:
      model: openai/gpt-4o-mini
      # Third-party OpenAI-compatible endpoint; base URL and key are resolved
      # from the environment by LiteLLM's "os.environ/<VAR>" convention.
      api_base: "os.environ/GMN_API_BASE"
      api_key: "os.environ/GMN_API_KEY"
      # Some gateways/WAFs require specific headers to allow /v1/responses
      extra_headers:
        OpenAI-Beta: "responses=v1"
        User-Agent: "curl/8.0"

  # Pick a modern, fast Claude Sonnet variant as the default alias.
  # You can still call any Anthropic model via `anthropic/<model>` because of
  # the wildcard route below.
  - model_name: claude-sonnet
    litellm_params:
      model: anthropic/claude-3-7-sonnet-20250219
      api_key: "os.environ/ANTHROPIC_API_KEY"

  # Volcengine ARK (Anthropic-compatible)
  - model_name: ark-code-latest
    litellm_params:
      model: anthropic/ark-code-latest
      api_base: "os.environ/ARK_ANTHROPIC_BASE_URL"
      api_key: "os.environ/ARK_ANTHROPIC_AUTH_TOKEN"

  # Optional: allow explicit provider-prefixed model names (wildcard routes).
  # Wildcards are quoted defensively — a bare leading `*` is a YAML alias sigil.
  - model_name: "openai/*"
    litellm_params:
      model: "openai/*"
      api_base: "os.environ/GMN_API_BASE"
      api_key: "os.environ/GMN_API_KEY"
      extra_headers:
        OpenAI-Beta: "responses=v1"
        User-Agent: "curl/8.0"
    model_info:
      # Concrete model used when the proxy health-checks this wildcard route.
      health_check_model: openai/gpt-4o-mini

  - model_name: "anthropic/*"
    litellm_params:
      model: "anthropic/*"
      api_key: "os.environ/ANTHROPIC_API_KEY"
    model_info:
      health_check_model: anthropic/claude-3-7-sonnet-20250219
litellm_settings:
  # Improves compatibility when clients send provider-specific params:
  # unsupported params are silently dropped instead of raising an error.
  drop_params: true
general_settings:
  # Require "Authorization: Bearer <key>" on every proxy request; the key is
  # read from the environment, not stored in this file.
  master_key: "os.environ/LITELLM_MASTER_KEY"