owlninjam committed on
Commit
c4a3354
·
verified ·
1 Parent(s): d965c5c

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +10 -7
Dockerfile CHANGED
@@ -2,35 +2,38 @@ FROM python:3.10-slim
2
 
3
  WORKDIR /app
4
 
5
- # Install minimal system dependencies
6
  RUN apt-get update && apt-get install -y \
7
  wget \
8
  curl \
 
 
 
9
  && rm -rf /var/lib/apt/lists/*
10
 
11
- # Copy requirements first for better caching
12
  COPY requirements.txt .
13
 
14
- # Install Python dependencies, forcing precompiled wheels for llama-cpp-python
15
  RUN pip install --no-cache-dir --upgrade pip && \
16
  pip install --no-cache-dir --prefer-binary -r requirements.txt
17
 
18
- # Download model file
19
  RUN echo "📥 Downloading CapybaraHermes model..." && \
20
  wget --progress=bar:force:noscroll -O capybarahermes-2.5-mistral-7b.Q5_K_M.gguf \
21
  https://huggingface.co/TheBloke/CapybaraHermes-2.5-Mistral-7B-GGUF/resolve/main/capybarahermes-2.5-mistral-7b.Q5_K_M.gguf && \
22
  echo "✅ Model downloaded: $(du -h capybarahermes-2.5-mistral-7b.Q5_K_M.gguf | cut -f1)"
23
 
24
- # Copy application files
25
  COPY api.py .
26
  COPY app.py .
27
 
28
- # Expose Hugging Face Spaces port
29
  EXPOSE 7860
30
 
31
  # Health check
32
  HEALTHCHECK --interval=30s --timeout=30s --start-period=300s --retries=3 \
33
  CMD curl --fail http://localhost:7860/_stcore/health || exit 1
34
 
35
- # Run Streamlit on port 7860
36
  CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.headless=true"]
 
2
 
3
  WORKDIR /app
4
 
5
+ # Install minimal system dependencies + build tools
6
  RUN apt-get update && apt-get install -y \
7
  wget \
8
  curl \
9
+ build-essential \
10
+ cmake \
11
+ python3-dev \
12
  && rm -rf /var/lib/apt/lists/*
13
 
14
+ # Copy requirements
15
  COPY requirements.txt .
16
 
17
+ # Install Python packages
18
  RUN pip install --no-cache-dir --upgrade pip && \
19
  pip install --no-cache-dir --prefer-binary -r requirements.txt
20
 
21
+ # Download GGUF model
22
  RUN echo "📥 Downloading CapybaraHermes model..." && \
23
  wget --progress=bar:force:noscroll -O capybarahermes-2.5-mistral-7b.Q5_K_M.gguf \
24
  https://huggingface.co/TheBloke/CapybaraHermes-2.5-Mistral-7B-GGUF/resolve/main/capybarahermes-2.5-mistral-7b.Q5_K_M.gguf && \
25
  echo "✅ Model downloaded: $(du -h capybarahermes-2.5-mistral-7b.Q5_K_M.gguf | cut -f1)"
26
 
27
+ # Copy app code
28
  COPY api.py .
29
  COPY app.py .
30
 
31
+ # Expose Hugging Face port
32
  EXPOSE 7860
33
 
34
  # Health check
35
  HEALTHCHECK --interval=30s --timeout=30s --start-period=300s --retries=3 \
36
  CMD curl --fail http://localhost:7860/_stcore/health || exit 1
37
 
38
+ # Start Streamlit
39
  CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.headless=true"]