Update Dockerfile
Browse files- Dockerfile +14 -25
Dockerfile
CHANGED
|
@@ -1,47 +1,36 @@
|
|
| 1 |
-
FROM
|
| 2 |
|
| 3 |
-
#
|
| 4 |
-
|
| 5 |
|
| 6 |
-
#
|
| 7 |
RUN apt-get update && apt-get install -y --no-install-recommends \
|
| 8 |
-
python3 \
|
| 9 |
-
python3-pip \
|
| 10 |
ffmpeg \
|
| 11 |
&& rm -rf /var/lib/apt/lists/*
|
| 12 |
|
| 13 |
-
#
|
| 14 |
RUN useradd -m -u 1000 user
|
| 15 |
USER user
|
| 16 |
-
|
| 17 |
-
# 4. Set environment variables
|
| 18 |
ENV PATH="/home/user/.local/bin:$PATH"
|
| 19 |
ENV HF_HOME="/home/user/.cache/huggingface"
|
| 20 |
-
# CRITICAL: This disables buffering so logs print instantly in the HF Console
|
| 21 |
-
ENV PYTHONUNBUFFERED=1
|
| 22 |
|
| 23 |
WORKDIR /home/user/app
|
| 24 |
|
| 25 |
-
#
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
|
|
|
|
|
|
|
|
|
| 29 |
flask==3.1.1 \
|
| 30 |
edge-tts \
|
| 31 |
num2words==0.5.14 \
|
| 32 |
"huggingface_hub>=0.27.0" \
|
| 33 |
-
"numpy<2.0"
|
| 34 |
-
llama-cpp-python \
|
| 35 |
-
--extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
|
| 36 |
|
| 37 |
-
# 6. Copy your Python app into the container
|
| 38 |
COPY --chown=user:user . .
|
| 39 |
|
| 40 |
EXPOSE 7860
|
| 41 |
|
| 42 |
-
|
| 43 |
-
# Otherwise, it attempts to run `/app/llama-cli python3 app.py` and crashes.
|
| 44 |
-
ENTRYPOINT[]
|
| 45 |
-
|
| 46 |
-
# 8. Start the app
|
| 47 |
-
CMD ["python3", "app.py"]
|
|
|
|
# syntax=docker/dockerfile:1
FROM python:3.11-slim

# Force logs to show up instantly in the Hugging Face console (disables
# Python's stdout/stderr buffering).
ENV PYTHONUNBUFFERED=1

# Install only the system dependency needed for edge-tts (audio).
# apt lists are removed in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ffmpeg \
    && rm -rf /var/lib/apt/lists/*

# Set up the non-root user (Hugging Face Spaces convention: UID 1000, "user").
RUN useradd -m -u 1000 user
USER user

# Make user-level pip installs callable, and keep the HF model cache inside
# the user's home so it is writable as non-root.
ENV PATH="/home/user/.local/bin:$PATH"
ENV HF_HOME="/home/user/.cache/huggingface"

WORKDIR /home/user/app

# Install standard Python libraries (no llama.cpp, no C++ compiling).
# torch is pulled from the CPU-only wheel index to avoid CUDA downloads.
# NOTE(review): edge-tts is unpinned — consider pinning for reproducible builds.
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir \
        torch==2.6.0+cpu --extra-index-url https://download.pytorch.org/whl/cpu \
        "transformers>=4.49.0" \
        "accelerate>=1.3.0" \
        "gguf>=0.10.0" \
        flask==3.1.1 \
        edge-tts \
        num2words==0.5.14 \
        "huggingface_hub>=0.27.0" \
        "numpy<2.0"

# Copy the application code, owned by the non-root user.
COPY --chown=user:user . .

# Documentation only: Spaces routes HTTP traffic to port 7860.
EXPOSE 7860

# BUGFIX: the original `CMD["python", "app.py"]` had no whitespace after the
# keyword; Docker parses the instruction name up to the first space, so it
# became an unknown instruction and the build failed. Exec (JSON-array) form
# with a space keeps the app as PID 1 so it receives SIGTERM on stop.
CMD ["python", "app.py"]
|
|
|
|
|
|
|
|
|
|
|
|
|
|