# docker-compose.yml — Picarones
#
# Services disponibles :
# - picarones : interface web + benchmarks (port 8000)
# - ollama : LLMs locaux (port 11434, profil optionnel)
#
# Usage :
# docker compose up -d # Picarones seul
# docker compose --profile ollama up -d # Picarones + Ollama
# docker compose down
#
# Variables d'environnement :
# Créer un fichier .env à la racine (voir .env.example)
services:
  # ────────────────────────────────────────────────
  # Main service: Picarones (web UI + benchmarks)
  # ────────────────────────────────────────────────
  picarones:
    build:
      context: .
      dockerfile: Dockerfile
      target: runtime
    image: picarones:latest
    container_name: picarones
    restart: unless-stopped
    ports:
      - "${PICARONES_PORT:-8000}:8000"
    volumes:
      # Corpus to benchmark (read-only)
      - "${CORPUS_DIR:-./corpus}:/app/corpus:ro"
      # Generated reports (read/write)
      - "${RAPPORTS_DIR:-./rapports}:/app/rapports:rw"
      # SQLite history (persistent, named volume)
      - picarones_history:/home/picarones/.picarones
    environment:
      # LLM APIs
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
      - MISTRAL_API_KEY=${MISTRAL_API_KEY:-}
      # Cloud OCR APIs
      - GOOGLE_APPLICATION_CREDENTIALS=${GOOGLE_APPLICATION_CREDENTIALS:-}
      - AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID:-}
      - AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY:-}
      - AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION:-eu-west-1}
      - AZURE_DOC_INTEL_ENDPOINT=${AZURE_DOC_INTEL_ENDPOINT:-}
      - AZURE_DOC_INTEL_KEY=${AZURE_DOC_INTEL_KEY:-}
      # Ollama (resolved by service name on picarones_net when the
      # "ollama" profile is active)
      - OLLAMA_BASE_URL=http://ollama:11434
      # Python runtime
      - PYTHONUNBUFFERED=1
      - PYTHONIOENCODING=utf-8
    # NOTE: intentionally no `depends_on: [ollama]`. Ollama sits behind an
    # optional profile, so a hard dependency would break the documented
    # default usage `docker compose up -d` (Picarones alone) when the
    # profile is disabled. No start-order guarantee is needed: the app
    # reaches Ollama lazily through OLLAMA_BASE_URL.
    healthcheck:
      # NOTE(review): assumes the runtime image provides `curl` and the app
      # exposes GET /health — confirm against the Dockerfile.
      test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 20s
    networks:
      - picarones_net
# ββββββββββββββββββββββββββββββββββββββββββββββββ
# Service optionnel : Ollama (LLMs locaux)
# Activer avec : docker compose --profile ollama up
# ββββββββββββββββββββββββββββββββββββββββββββββββ
ollama:
image: ollama/ollama:latest
container_name: picarones_ollama
restart: unless-stopped
profiles:
- ollama
ports:
- "${OLLAMA_PORT:-11434}:11434"
volumes:
- ollama_models:/root/.ollama
environment:
- OLLAMA_ORIGINS=*
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
interval: 30s
timeout: 10s
retries: 5
start_period: 30s
networks:
- picarones_net
# ────────────────────────────────────────────────
# Persistent named volumes
# ────────────────────────────────────────────────
volumes:
  # SQLite benchmark history mounted into the picarones service
  picarones_history:
    driver: local
  # Ollama model weights mounted into the ollama service
  ollama_models:
    driver: local
# ────────────────────────────────────────────────
# Internal bridge network shared by both services
# ────────────────────────────────────────────────
networks:
  picarones_net:
    driver: bridge