Spaces:
Sleeping
Sleeping
make GPU compatible
Browse files — Dockerfile: +27 −29
Dockerfile
CHANGED
|
@@ -1,7 +1,9 @@
|
|
| 1 |
-
|
|
|
|
| 2 |
|
| 3 |
WORKDIR /app
|
| 4 |
|
|
|
|
| 5 |
RUN apt-get update && apt-get install -y \
|
| 6 |
build-essential \
|
| 7 |
curl \
|
|
@@ -9,36 +11,32 @@ RUN apt-get update && apt-get install -y \
|
|
| 9 |
poppler-utils \
|
| 10 |
&& rm -rf /var/lib/apt/lists/*
|
| 11 |
|
| 12 |
-
#
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 13 |
ENV HF_HOME=/app/.cache/huggingface
|
| 14 |
-
ENV TRANSFORMERS_CACHE=/app/.cache/huggingface
|
| 15 |
-
ENV HF_DATASETS_CACHE=/app/.cache/huggingface
|
| 16 |
ENV HF_HUB_CACHE=/app/.cache/huggingface
|
|
|
|
|
|
|
| 17 |
|
| 18 |
-
# Install core dependencies first (for better Docker layer caching)
|
| 19 |
-
RUN pip3 install --no-cache-dir \
|
| 20 |
-
torch>=2.0.0 \
|
| 21 |
-
numpy>=1.21.0 \
|
| 22 |
-
Pillow>=9.0.0 \
|
| 23 |
-
tqdm>=4.60.0 \
|
| 24 |
-
pyyaml>=6.0 \
|
| 25 |
-
python-dotenv>=0.19.0 \
|
| 26 |
-
colpali-engine>=0.3.0 \
|
| 27 |
-
transformers>=4.35.0 \
|
| 28 |
-
qdrant-client>=1.7.0 \
|
| 29 |
-
streamlit>=1.25.0 \
|
| 30 |
-
httpx>=0.24.0 \
|
| 31 |
-
pandas \
|
| 32 |
-
altair \
|
| 33 |
-
datasets
|
| 34 |
-
|
| 35 |
-
# Pre-download HuggingFace models during build
|
| 36 |
COPY download_models.py ./
|
| 37 |
RUN mkdir -p /app/.cache/huggingface && \
|
| 38 |
chmod -R 755 /app/.cache && \
|
| 39 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 40 |
|
| 41 |
-
# Copy application
|
| 42 |
COPY visual_rag/ ./visual_rag/
|
| 43 |
COPY benchmarks/ ./benchmarks/
|
| 44 |
COPY demo/ ./demo/
|
|
@@ -51,6 +49,7 @@ RUN pip3 install -e .
|
|
| 51 |
# Setup Streamlit configuration
|
| 52 |
RUN mkdir -p /app/.streamlit && \
|
| 53 |
mkdir -p /app/.cache/streamlit && \
|
|
|
|
| 54 |
mkdir -p /app/results && \
|
| 55 |
printf '[server]\nport = 7860\nheadless = true\nenableCORS = false\nenableXsrfProtection = false\nmaxUploadSize = 500\n\n[browser]\ngatherUsageStats = false\n' > /app/.streamlit/config.toml && \
|
| 56 |
chmod -R 777 /app/.streamlit && \
|
|
@@ -59,15 +58,14 @@ RUN mkdir -p /app/.streamlit && \
|
|
| 59 |
chmod -R 777 /app/results
|
| 60 |
|
| 61 |
ENV STREAMLIT_CONFIG_HOME=/app/.streamlit
|
| 62 |
-
ENV STREAMLIT_USER_BASE_PATH=/app/.cache/streamlit
|
| 63 |
ENV STREAMLIT_BROWSER_GATHER_USAGE_STATS=false
|
|
|
|
| 64 |
|
| 65 |
-
#
|
| 66 |
EXPOSE 7860
|
| 67 |
|
| 68 |
-
#
|
| 69 |
-
|
| 70 |
-
CMD curl --fail http://localhost:7860/_stcore/health || exit 1
|
| 71 |
|
| 72 |
# Run Streamlit app
|
| 73 |
ENTRYPOINT ["streamlit", "run", "demo_app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.headless", "true"]
|
|
|
|
# Use HuggingFace's pre-built GPU image (includes CUDA, PyTorch, Transformers).
# The :latest tag alone would not be reproducible, but the sha256 digest pin
# makes every build resolve to the exact same base image.
FROM huggingface/transformers-pytorch-gpu:latest@sha256:4c7317881a534b22e18add49c925096fa902651fb0571c69f3cad58af3ea2c0f

WORKDIR /app
| 6 |
+
# Install system dependencies
|
| 7 |
RUN apt-get update && apt-get install -y \
|
| 8 |
build-essential \
|
| 9 |
curl \
|
|
|
|
| 11 |
poppler-utils \
|
| 12 |
&& rm -rf /var/lib/apt/lists/*
|
| 13 |
|
# Verify Python version (fails the build early if the interpreter is broken)
RUN python3 -V

# Copy requirements first, before the application source, so the dependency
# layer is cached and only rebuilt when requirements.txt itself changes
COPY requirements.txt ./

# Install Python dependencies; --no-cache-dir keeps pip's download cache
# out of the image layer
RUN pip3 install --no-cache-dir -r requirements.txt
| 22 |
+
|
# Pre-download Hugging Face models during build.
# IMPORTANT: set the cache directory BEFORE downloading so the weights land
# in a path that persists into the final image.
# NOTE: TRANSFORMERS_CACHE is deprecated in recent transformers releases in
# favor of HF_HOME, but it is kept here for compatibility with older versions.
ENV HF_HOME=/app/.cache/huggingface \
    HF_HUB_CACHE=/app/.cache/huggingface \
    HF_DATASETS_CACHE=/app/.cache/huggingface \
    TRANSFORMERS_CACHE=/app/.cache/huggingface
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Bake model weights into the image so the Space does not re-download them
# on every cold start. chmod -R on /app/.cache already covers the
# huggingface subdirectory, so a second chmod there is unnecessary.
COPY download_models.py ./
RUN mkdir -p /app/.cache/huggingface && \
    chmod -R 755 /app/.cache && \
    python3 download_models.py

# Install colpali-engine (after model download to ensure deps are ready);
# --no-cache-dir keeps pip's download cache out of the layer
RUN pip3 install --no-cache-dir colpali-engine einops~=0.8.1
| 38 |
|
# Copy all application files (after dependency install, so source-only
# changes do not invalidate the dependency layers above)
COPY visual_rag/ ./visual_rag/
COPY benchmarks/ ./benchmarks/
COPY demo/ ./demo/
|
|
|
| 49 |
# Setup Streamlit configuration
|
| 50 |
RUN mkdir -p /app/.streamlit && \
|
| 51 |
mkdir -p /app/.cache/streamlit && \
|
| 52 |
+
mkdir -p /app/.cache/huggingface && \
|
| 53 |
mkdir -p /app/results && \
|
| 54 |
printf '[server]\nport = 7860\nheadless = true\nenableCORS = false\nenableXsrfProtection = false\nmaxUploadSize = 500\n\n[browser]\ngatherUsageStats = false\n' > /app/.streamlit/config.toml && \
|
| 55 |
chmod -R 777 /app/.streamlit && \
|
|
|
|
| 58 |
chmod -R 777 /app/results
|
# Streamlit runtime configuration — point config and per-user state at the
# app-owned, world-writable paths created in the setup step above
ENV STREAMLIT_CONFIG_HOME=/app/.streamlit \
    STREAMLIT_BROWSER_GATHER_USAGE_STATS=false \
    STREAMLIT_USER_BASE_PATH=/app/.cache/streamlit

# Expose port (HF Spaces routes traffic to 7860; EXPOSE is documentation only)
EXPOSE 7860
# Verify GPU availability (will show False during build, True at runtime);
# `|| true` keeps the build going even if the import check fails
RUN python3 -c "import torch; print('CUDA available:', torch.cuda.is_available())" || true

# Probe Streamlit's built-in health endpoint so the runtime can detect a
# wedged container. (The pre-change file attempted this with a bare
# `CMD curl …`, which would have run curl instead of the app — HEALTHCHECK
# is the correct instruction.) Generous start-period: model load is slow.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
  CMD curl --fail http://localhost:7860/_stcore/health || exit 1

# Run Streamlit app (exec form: Streamlit is PID 1 and receives SIGTERM)
ENTRYPOINT ["streamlit", "run", "demo_app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.headless", "true"]