# agentic-api / docker-compose.yml
# Author: MiniMax Agent — commit 41831f1
# "Add complete local Ollama setup with OpenELM — includes setup script,
#  API server, test scripts, and documentation"
# NOTE(review): the `version` key is obsolete under Compose v2 (ignored with
# a warning); kept only for compatibility with legacy docker-compose v1.
version: '3.8'

services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    ports:
      # Bound to loopback only, so the Ollama API is not exposed on the LAN.
      - "127.0.0.1:11434:11434"
    volumes:
      # Persists pulled models across container recreations.
      - ollama_data:/root/.ollama
    deploy:
      resources:
        reservations:
          devices:
            # Requires the NVIDIA Container Toolkit on the host.
            - driver: nvidia
              count: all
              capabilities: [gpu]
    restart: unless-stopped
    healthcheck:
      # Fix: the original check ran `curl`, which the ollama/ollama image
      # does not ship — the check always failed, and because `api` waits on
      # `condition: service_healthy`, the API container never started.
      # The bundled `ollama` CLI queries the same local server instead.
      test: ["CMD", "ollama", "list"]
      interval: 30s
      timeout: 10s
      retries: 3
      # Give the model server time to come up before failures count.
      start_period: 30s

  api:
    build:
      context: .
      dockerfile: Dockerfile.api
    container_name: openelm-api
    ports:
      # Host 8001 → container 8000 (quoted to avoid YAML sexagesimal traps).
      - "8001:8000"
    environment:
      # Service-name DNS on the default Compose network.
      - OLLAMA_BASE_URL=http://ollama:11434
      # NOTE(review): confirm this tag matches what the setup script pulls —
      # OpenELM is not published under this name in the Ollama library.
      - OLLAMA_MODEL=apple/OpenELM-3B-Instruct
    depends_on:
      ollama:
        condition: service_healthy
    restart: unless-stopped

volumes:
  ollama_data: