# llm-proxy — example environment configuration (.env.example)
# Copy to .env and fill in the values below.
# === Required ===
PROXY_AUTH_TOKEN=your-secure-shared-secret # Token that clients use as their API key
# === Providers (at least one required) ===
ANTHROPIC_API_KEY=sk-ant-... # Anthropic API key (enables Anthropic relay when set)
# === Optional ===
PORT=7860 # Default: 7860 (Hugging Face Spaces default)
HOST=0.0.0.0
LOG_LEVEL=info # trace | debug | info | warn | error
# Security
RATE_LIMIT_MAX=100 # Requests per time window per IP
RATE_LIMIT_WINDOW_MS=60000 # Time window in ms
BODY_LIMIT=5242880 # Max body size in bytes (default: 5 MB)
CORS_ORIGIN= # Empty = disabled
# Anthropic upstream
ANTHROPIC_BASE_URL=https://api.anthropic.com # Overridable for testing
UPSTREAM_TIMEOUT_MS=300000 # Upstream request timeout (default: 5 min)
# Gemini upstream
GEMINI_API_KEY= # Google Gemini API key (enables Gemini relay when set)
GEMINI_BASE_URL=https://generativelanguage.googleapis.com # Overridable for testing