Update Dockerfile
Browse files- Dockerfile +8 -9
Dockerfile
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
|
|
| 1 |
FROM python:3.10-slim
|
| 2 |
|
| 3 |
WORKDIR /app
|
|
@@ -8,27 +9,25 @@ RUN apt-get update && apt-get install -y \
|
|
| 8 |
curl \
|
| 9 |
&& rm -rf /var/lib/apt/lists/*
|
| 10 |
|
| 11 |
-
# Install llama-cpp-python from its prebuilt CPU wheel index
|
| 12 |
RUN pip install --no-cache-dir \
|
| 13 |
--extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu \
|
| 14 |
llama-cpp-python==0.2.24
|
| 15 |
|
| 16 |
-
# Copy and install the Python requirements
|
| 17 |
COPY requirements.txt .
|
| 18 |
RUN pip install --no-cache-dir --upgrade pip && \
|
| 19 |
pip install --no-cache-dir -r requirements.txt
|
| 20 |
|
| 21 |
-
# Download the model
|
| 22 |
RUN wget --progress=bar:force:noscroll -O capybarahermes-2.5-mistral-7b.Q5_K_M.gguf \
|
| 23 |
https://huggingface.co/TheBloke/CapybaraHermes-2.5-Mistral-7B-GGUF/resolve/main/capybarahermes-2.5-mistral-7b.Q5_K_M.gguf
|
| 24 |
|
| 25 |
-
# Copy the application files
|
| 26 |
COPY api.py .
|
| 27 |
-
COPY app.py .
|
| 28 |
|
|
|
|
| 29 |
EXPOSE 7860
|
| 30 |
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
CMD ["streamlit", "run", "app.py", "--server.port=7860", "--server.address=0.0.0.0", "--server.headless=true"]
|
|
|
|
# Dockerfile for the llama-cpp FastAPI service.
# A pinned minor version of the slim Debian-based Python image keeps the
# image small while remaining reproducible across rebuilds.
FROM python:3.10-slim

# All subsequent relative paths resolve under /app (created automatically).
WORKDIR /app
|
|
|
|
| 9 |
curl \
|
| 10 |
&& rm -rf /var/lib/apt/lists/*
|
| 11 |
|
# Install llama-cpp-python from its prebuilt CPU wheel index so the build
# needs no C/C++ toolchain to compile llama.cpp from source. The version is
# pinned for reproducibility; --no-cache-dir keeps pip's download cache out
# of the image layer.
RUN pip install --no-cache-dir \
        --extra-index-url https://abetlen.github.io/llama-cpp-python/whl/cpu \
        llama-cpp-python==0.2.24
|
| 16 |
|
# Copy only the dependency manifest first: this layer (and the install
# below) stays cached as long as requirements.txt itself is unchanged,
# independent of edits to the application code copied later.
COPY requirements.txt .

# Upgrade pip, then install the application's Python dependencies.
# --no-cache-dir on both steps keeps pip's cache out of the image layer.
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt
|
| 21 |
|
# Download the GGUF model at build time so the container can start without
# network access. Fetch with curl rather than wget: curl is the HTTP client
# visibly installed in the apt layer above (wget is not shown there), -f
# fails the build on an HTTP error instead of baking an HTML error page
# into the image, and -L follows the redirect that huggingface.co issues
# for /resolve/ URLs. --retry guards against transient network hiccups.
# NOTE(review): this bakes a multi-GB file into an image layer; consider a
# runtime download or a mounted volume if image size becomes a problem.
RUN curl -fL --retry 3 -o capybarahermes-2.5-mistral-7b.Q5_K_M.gguf \
    https://huggingface.co/TheBloke/CapybaraHermes-2.5-Mistral-7B-GGUF/resolve/main/capybarahermes-2.5-mistral-7b.Q5_K_M.gguf
|
| 25 |
|
# Copy the FastAPI application module into the image. Done after the
# dependency layers so code-only edits do not invalidate the installs above.
COPY api.py .
|
|
|
|
| 28 |
|
# Document the port the service listens on (7860, the port Hugging Face
# Spaces expects). EXPOSE is metadata only; it does not publish the port.
EXPOSE 7860
|
| 31 |
|
# Launch the Uvicorn ASGI server hosting the `app` object from api.py,
# bound to all interfaces on the exposed port. Exec (JSON-array) form makes
# uvicorn PID 1 so it receives SIGTERM directly on `docker stop`.
CMD ["uvicorn", "api:app", "--host", "0.0.0.0", "--port", "7860"]
|
|
|
|
|
|