# Gemma-Nano-Max / Dockerfile
# Author: CooLLaMACEO — commit c1494f5 (verified), "Update Dockerfile", 868 bytes
# (Header retained from the Hugging Face file viewer; commented out so the file parses.)
# Use Python 3.10 on Debian Bookworm for best compatibility
FROM python:3.10-slim-bookworm

# 1. Install system essentials
#    --no-install-recommends keeps the image slim (hadolint DL3015);
#    the apt list cache is removed in the same layer so it never ships in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    git \
    wget \
    && rm -rf /var/lib/apt/lists/*

# 2. Set up a non-root user (Hugging Face requirement for safety).
#    UID 1000 matches the UID HF Spaces expects for the "user" account.
RUN useradd -m -u 1000 user
USER user
# Put user-level pip installs on PATH; PYTHONUNBUFFERED ensures logs are
# flushed immediately so they show up in the container log stream.
ENV PATH="/home/user/.local/bin:$PATH" \
    PYTHONUNBUFFERED=1

WORKDIR /home/user/app

# 3. Install Python dependencies.
#    --extra-index-url points pip at the PRE-COMPILED CPU wheels for
#    llama-cpp-python, avoiding a lengthy from-source build in the container.
#    NOTE(review): package versions are unpinned (hadolint DL3013) — pin them
#    for reproducible builds once known-good versions are confirmed.
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir \
        --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu \
        fastapi \
        huggingface_hub \
        llama-cpp-python \
        uvicorn

# 4. Copy your application code, owned by the non-root user (explicit user:group)
COPY --chown=user:user . .

# 5. Expose the port HF expects (documentation only; 7860 > 1024, so the
#    non-root user can bind it without extra capabilities)
EXPOSE 7860

# 6. Run the app. Exec (JSON-array) form makes python PID 1 so it receives
#    SIGTERM directly on container stop.
CMD ["python", "main.py"]