# litellmGUI / .env.example
# github-actions[bot]
# deploy: b1de43e — 更新 README.md
# e1d8498
# ─────────────────────────────────────────────────────────────────────────────
# AI Gateway Hub — Environment Configuration
# Copy this file to .env and fill in your values.
# ─────────────────────────────────────────────────────────────────────────────
# ── Security ──────────────────────────────────────────────────────────────────
# Master key for LiteLLM proxy admin API.
# Change this in production!
LITELLM_MASTER_KEY=sk-gateway-master-key-change-me
# JWT secret for backend sessions (future use).
JWT_SECRET=super-secret-jwt-key-change-in-production
# ── Networking ────────────────────────────────────────────────────────────────
# The public URL where your gateway is accessible.
# This is used to generate the OpenAI-compatible endpoint URLs shown in the UI.
GATEWAY_PUBLIC_URL=http://localhost
# HTTP/HTTPS ports for the nginx reverse proxy.
HTTP_PORT=80
HTTPS_PORT=443
# ── Optional: Pre-configured provider keys ───────────────────────────────────
# These can be set here as env vars for providers configured in litellm/config.yaml.
# OPENAI_API_KEY=sk-...
# ANTHROPIC_API_KEY=sk-ant-...
# GROQ_API_KEY=gsk_...
# GEMINI_API_KEY=...
# COHERE_API_KEY=...
# MISTRAL_API_KEY=...
# TOGETHER_API_KEY=...
# PERPLEXITYAI_API_KEY=pplx-...
# ── Logging ───────────────────────────────────────────────────────────────────
# Minimum level for backend console logging. Must be "http" or lower so that
# Morgan HTTP access logs (level 3) are not silently dropped.
# Winston level hierarchy: error(0) < warn(1) < info(2) < http(3) < verbose(4)
LOG_LEVEL=http