Spaces:
Sleeping
Sleeping
Update Dockerfile
Browse files- Dockerfile +12 -9
Dockerfile
CHANGED
|
@@ -1,22 +1,25 @@
|
|
| 1 |
-
#
|
| 2 |
-
FROM python:3.10-
|
| 3 |
|
| 4 |
# Set working directory
|
| 5 |
WORKDIR /app
|
| 6 |
|
| 7 |
-
# 1. Install
|
|
|
|
| 8 |
RUN apt-get update && apt-get install -y \
|
| 9 |
libgl1 \
|
| 10 |
libglib2.0-0 \
|
| 11 |
git \
|
|
|
|
|
|
|
| 12 |
&& rm -rf /var/lib/apt/lists/*
|
| 13 |
|
| 14 |
-
# 2.
|
| 15 |
ENV PIP_NO_CACHE_DIR=1
|
| 16 |
ENV PIP_ONLY_BINARY=:all:
|
| 17 |
RUN pip install --upgrade pip
|
| 18 |
|
| 19 |
-
# 3.
|
| 20 |
RUN pip install numpy==1.26.4
|
| 21 |
RUN pip install cryptography==42.0.5
|
| 22 |
RUN pip install huggingface_hub==0.23.0
|
|
@@ -25,11 +28,11 @@ RUN pip install pandas==2.2.2
|
|
| 25 |
RUN pip install matplotlib==3.8.4
|
| 26 |
RUN pip install gradio==4.44.0
|
| 27 |
|
| 28 |
-
# 4. Install Llama
|
| 29 |
-
# We
|
| 30 |
-
RUN pip install
|
| 31 |
|
| 32 |
-
# 5.
|
| 33 |
COPY . .
|
| 34 |
ENV GRADIO_SERVER_NAME="0.0.0.0"
|
| 35 |
EXPOSE 7860
|
|
|
|
| 1 |
+
# Use a Python 3.10 image based on Debian Bookworm (Not Alpine!)
|
| 2 |
+
FROM python:3.10-bookworm
|
| 3 |
|
| 4 |
# Set working directory
|
| 5 |
WORKDIR /app
|
| 6 |
|
| 7 |
+
# 1. Install system tools and build essentials
# We add g++ and make just in case the system needs to link local binaries
# (llama-cpp-python compiles from source below). --no-install-recommends keeps
# the layer minimal (hadolint DL3015); packages are sorted alphabetically; the
# apt list cache is removed in the same layer so it never persists in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    g++ \
    git \
    libgl1 \
    libglib2.0-0 \
    make \
    && rm -rf /var/lib/apt/lists/*
|
| 16 |
|
| 17 |
+
# 2. Strict Pip Environment
# PIP_NO_CACHE_DIR=1 stops pip from writing its download/wheel cache into the
# image layers. PIP_ONLY_BINARY=:all: forbids from-source builds for every
# package, so installs fail fast instead of compiling — NOTE: any later step
# that intentionally builds from source must override this variable inline.
|
| 18 |
ENV PIP_NO_CACHE_DIR=1
|
| 19 |
ENV PIP_ONLY_BINARY=:all:
|
| 20 |
RUN pip install --upgrade pip
|
| 21 |
|
| 22 |
+
# 3. Individual Component Install (Memory Protection)
|
| 23 |
RUN pip install numpy==1.26.4
|
| 24 |
RUN pip install cryptography==42.0.5
|
| 25 |
RUN pip install huggingface_hub==0.23.0
|
|
|
|
| 28 |
RUN pip install matplotlib==3.8.4
|
| 29 |
RUN pip install gradio==4.44.0
|
| 30 |
|
| 31 |
+
# 4. Install Llama-CPP-Python (The Reliable Method)
# We avoid external URLs and use the standard index with a CPU-FORCE flag.
# PIP_ONLY_BINARY must be cleared for this one step: CMAKE_ARGS only applies
# to a from-source build, and llama-cpp-python 0.2.90 ships no manylinux
# wheel on PyPI, so the global ":all:" binary-only policy would make pip
# reject this install outright. Clearing it inline keeps the strict policy
# for every other package.
RUN PIP_ONLY_BINARY= CMAKE_ARGS="-DLLAMA_BLAS=OFF -DLLAMA_CUDA=OFF" pip install llama-cpp-python==0.2.90
|
| 34 |
|
| 35 |
+
# 5. Finalize: copy the application source last (most frequently changing
# layer), bind Gradio to all interfaces so the container port is reachable,
# and document the HF Spaces port (EXPOSE is informational only — it does
# not publish the port).
|
| 36 |
COPY . .
|
| 37 |
ENV GRADIO_SERVER_NAME="0.0.0.0"
|
| 38 |
EXPOSE 7860
|