# llm-proxy / docker-compose.yml
# Author: relfa
# feat: initial LLM gateway proxy for Claude Code
# Commit: ae9d2aa
services:
  ai-proxy:
    # Build the proxy image from the Dockerfile in this directory.
    build: .
    ports:
      # Publish the same port inside and outside the container.
      # Quoted to avoid YAML's sexagesimal-number trap; defaults to 7860.
      - "${PORT:-7860}:${PORT:-7860}"
    env_file:
      - .env
    restart: unless-stopped
    healthcheck:
      # NOTE(review): assumes `wget` is available in the built image — confirm
      # against the Dockerfile, otherwise the container will flap unhealthy.
      test: ["CMD", "wget", "--spider", "-q", "http://localhost:${PORT:-7860}/health"]
      interval: 30s
      timeout: 5s
      retries: 3
      start_period: 10s