nihardon committed on
Commit
4f57bb7
·
verified ·
1 Parent(s): 1fac6c7

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +8 -11
Dockerfile CHANGED
@@ -1,25 +1,22 @@
1
- # 1. Use Python 3.9 (The most stable version for this)
2
  FROM python:3.9
3
 
4
- # 2. Upgrade pip so it understands the platform flags
5
  RUN pip install --no-cache-dir --upgrade pip
6
 
7
- # 3. Install the specific Hugging Face version (Prevents the "HfFolder" crash)
8
  RUN pip install "huggingface_hub<0.25.0"
9
 
10
- # 4. Install llama-cpp-python with STRICT constraints
11
- # --platform manylinux... -> Forces it to ignore the "Musl" version that broke it before
12
- # --only-binary=:all: -> Forces it to download, never compile
13
  RUN pip install --no-cache-dir \
14
- llama-cpp-python \
15
- --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu \
16
- --platform manylinux_2_17_x86_64 \
17
- --only-binary=:all:
18
 
19
  # 5. Install Gradio
20
  RUN pip install --no-cache-dir gradio
21
 
22
- # 6. Run the app
23
  WORKDIR /app
24
  COPY . .
25
  CMD ["python", "app.py"]
 
# syntax=docker/dockerfile:1

# 1. Base image: standard (glibc) Python 3.9.
#    Must stay glibc-based so the cp39 manylinux_2_17 wheel below is
#    ABI-compatible (a musl/alpine base broke this previously).
FROM python:3.9

# 2. Upgrade pip so it understands modern wheel/platform tags.
RUN pip install --no-cache-dir --upgrade pip

# 3. Install Hugging Face Hub FIRST, pinned to a safe version.
#    NOTE(review): the pin presumably avoids the removal of HfFolder in
#    later huggingface_hub releases — confirm before unpinning.
RUN pip install --no-cache-dir "huggingface_hub<0.25.0"

# 4. Install llama-cpp-python via a DIRECT wheel URL.
#    Pointing at the exact v0.2.55 manylinux CPU wheel bypasses index
#    resolution entirely and guarantees the prebuilt binary is used
#    (never compiled from source).
RUN pip install --no-cache-dir \
    https://github.com/abetlen/llama-cpp-python/releases/download/v0.2.55/llama_cpp_python-0.2.55-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl

# 5. Install Gradio in its own layer so UI updates do not invalidate the
#    heavy llama-cpp-python layer above.
RUN pip install --no-cache-dir gradio

# 6. Copy the application and run it.
WORKDIR /app
COPY . .

# Gradio listens on 7860 by default (documentation only; does not publish).
EXPOSE 7860

CMD ["python", "app.py"]