ProfessorCEO committed on
Commit
fb559c4
·
verified ·
1 Parent(s): 0a2b15c

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +13 -24
Dockerfile CHANGED
@@ -1,33 +1,22 @@
1
- # We use the full image, not slim, to ensure better compatibility
2
- FROM python:3.10-slim
 
3
 
4
- # 1. Setup User (Required by Spaces)
 
 
 
 
 
 
5
  RUN useradd -m -u 1000 user
6
  USER user
7
  ENV HOME=/home/user \
8
  PATH=/home/user/.local/bin:$PATH
9
-
10
  WORKDIR $HOME/app
11
 
12
- # 2. INSTALL THE FACTORY TOOLS (Mandatory)
13
- # We need these. Without them, the build fails.
14
- USER root
15
- RUN apt-get update && apt-get install -y \
16
- build-essential \
17
- cmake \
18
- git \
19
- && rm -rf /var/lib/apt/lists/*
20
- USER user
21
-
22
- # 3. INSTALL WITH FALLBACK
23
- # We try to download the binary. If that fails, we now have the tools (gcc) to build it.
24
- # We set CMAKE_ARGS to make the build faster if it happens.
25
- ENV CMAKE_ARGS="-DLLAMA_BLAS=OFF"
26
-
27
- COPY --chown=user requirements.txt requirements.txt
28
- RUN pip install --no-cache-dir --upgrade pip && \
29
- pip install --no-cache-dir -r requirements.txt --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu
30
-
31
- # 4. LAUNCH
32
  COPY --chown=user . .
 
 
33
  CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]
 
# syntax=docker/dockerfile:1

# Use the official pre-built image: llama-cpp-python ships already compiled,
# so we skip the slow gcc/cmake source build the previous slim-python base needed.
# NOTE(review): ":latest" is not reproducible (hadolint DL3007) — pin a specific
# tag or digest once a known-good version is identified.
FROM ghcr.io/abetlen/llama-cpp-python:latest

# 1. Switch to root so the extra packages install into the system site-packages,
#    visible to every user (we drop privileges again below).
USER root

# 2. Install only the missing pieces (API server + model downloader).
#    NOTE(review): versions are unpinned to match the original; pin them
#    (e.g. fastapi==x.y.z) for reproducible builds.
RUN pip install --no-cache-dir fastapi uvicorn huggingface_hub python-dotenv pydantic

# 3. Create the non-root user required by Hugging Face Spaces (fixed UID 1000)
#    and make its local bin directory resolvable on PATH.
RUN useradd -m -u 1000 user
USER user
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Absolute path once HOME expands; created automatically by WORKDIR.
WORKDIR $HOME/app

# 4. Copy the application code, owned by the runtime user so it is writable
#    without root.
COPY --chown=user . .

# 5. Document the port Spaces routes traffic to, then launch the API server.
#    EXPOSE is documentation only — the actual bind happens in CMD.
EXPOSE 7860
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]