Spaces:
Sleeping
Sleeping
FROM python:3.10-slim

# System build tools required to compile llama-cpp-python from source
# (it builds llama.cpp with cmake during pip install).
RUN apt-get update && apt-get install -y \
    build-essential \
    cmake \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Install llama-cpp-python (server extras) plus the API stack.
# NOTE(review): no CUDA build flags (e.g. CMAKE_ARGS="-DGGML_CUDA=on") are
# set here, so this installs a CPU-only build despite the original comment
# mentioning CUDA — confirm whether GPU support is actually intended.
RUN pip install --no-cache-dir \
    llama-cpp-python[server] \
    fastapi \
    uvicorn \
    pydantic \
    requests \
    huggingface-hub

# Run as a non-root user for safety; uid 1000 matches the files chown'd below.
RUN useradd -m -u 1000 user
USER user
WORKDIR /home/user

# Copy the application code.
COPY --chown=user:user api.py .
COPY --chown=user:user download_model.py .

# Optionally download the GGUF model at build time (left disabled).
# RUN python download_model.py

EXPOSE 7860

# Start the server.
# FIX: the application module copied above is api.py, so the ASGI target must
# be "api:app" — the original "app:app" would fail at startup with
# "Could not import module 'app'" since no app.py exists in the image.
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "7860"]