# syntax=docker/dockerfile:1

# ---------------------------------------------------------------------------
# Stage 1: Prepare Ollama with the model baked in at build time.
# The official Ollama image ships the ollama binary at /usr/bin/ollama.
# NOTE(review): tag is unpinned — pin a specific tag or digest (hadolint
# DL3006) for reproducible builds; confirm which release the app was
# validated against before pinning.
# ---------------------------------------------------------------------------
FROM ollama/ollama AS ollama-builder

# Keep Ollama's data in a known location so Stage 2 can copy it out.
# The official image defaults to /root/.ollama; we set it explicitly.
ENV OLLAMA_HOME="/root/.ollama"
RUN mkdir -p ${OLLAMA_HOME}

# Start the Ollama server in the background, give it time to become ready
# (the sleep is what makes the subsequent pull reliable), then pull the
# model. On failure, dump the server log so the build error is diagnosable.
RUN bash -c "/usr/bin/ollama serve > \"${OLLAMA_HOME}/ollama_build.log\" 2>&1 & \
    sleep 15 && \
    /usr/bin/ollama pull krishna_choudhary/AI_Assistant_Chatbot || \
    (cat \"${OLLAMA_HOME}/ollama_build.log\" && exit 1)"

# ---------------------------------------------------------------------------
# Stage 2: Final application image — Streamlit app + pre-pulled Ollama model.
# ---------------------------------------------------------------------------
FROM ubuntu:22.04

# Create a non-root user/group for running the application. Required for
# security and to avoid permission failures on Hugging Face Spaces.
ARG UID=1000
ARG GID=1000
RUN groupadd -r appgroup -g ${GID} && useradd -r -u ${UID} -g appgroup appuser

# Explicitly create appuser's home directory: tools that resolve ~ (e.g.
# Ollama looking for ~/.ollama) need it to exist and be writable.
RUN mkdir -p /home/appuser && chown appuser:appgroup /home/appuser

WORKDIR /app

# System dependencies for the Streamlit app plus general utilities.
# BUGFIX: ubuntu:22.04 (jammy) has no 'python3.9' package in the default
# repositories (its python3 is 3.10, and no deadsnakes PPA is added), so the
# original 'apt-get install python3.9' failed the build. Install the distro
# python3/python3-pip instead. --no-install-recommends keeps the image small;
# cache cleanup happens in the same layer so it actually shrinks the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
        python3 \
        python3-pip \
        software-properties-common \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies first (separate layer from the source code so
# source-only changes don't reinstall dependencies).
COPY --chown=appuser:appgroup requirements.txt ./
RUN pip3 install --no-cache-dir -r /app/requirements.txt

# Application source code.
COPY --chown=appuser:appgroup src/ ./src/

# --- Bring Ollama (binary + model data) over from the builder stage ---
# The ollama executable lives at /usr/bin/ollama in the builder image.
COPY --from=ollama-builder /usr/bin/ollama /usr/local/bin/ollama

# Runtime Ollama data lives inside /app so the non-root user can write to it.
ENV OLLAMA_HOME="/app/.ollama"

# Make /app itself writable by appuser (Streamlit creates /app/.streamlit at
# runtime). Non-recursive on purpose: the COPYs above are already --chown'ed,
# and a recursive chown AFTER the model copy below would copy-up every model
# blob into a duplicate layer, doubling the image size.
RUN chown appuser:appgroup /app

# BUGFIX: the model pulled in Stage 1 was never copied into this image — the
# runtime OLLAMA_HOME started empty, wasting the entire builder stage and
# forcing a re-download (or failure) at container start. Copy it with
# --chown so no follow-up chown layer is needed.
COPY --from=ollama-builder --chown=appuser:appgroup /root/.ollama /app/.ollama

# Startup script. Installed into /usr/local/bin while still root, since that
# path is not writable by appuser.
COPY start.sh /usr/local/bin/start.sh
RUN chmod +x /usr/local/bin/start.sh

# Switch to the non-root user for everything from here on (including the
# ENTRYPOINT process).
USER appuser

# --- Streamlit runtime configuration ---
# Usage stats off; file watcher off for stability in containers; bind on
# 0.0.0.0:8501 (the port Hugging Face Spaces expects for Streamlit); keep
# Streamlit's data directory inside the writable /app tree.
ENV STREAMLIT_SERVER_BROWSER_GATHER_USAGE_STATS="false" \
    STREAMLIT_SERVER_FILE_WATCHER_TYPE="none" \
    STREAMLIT_SERVER_PORT="8501" \
    STREAMLIT_SERVER_ADDRESS="0.0.0.0" \
    STREAMLIT_GLOBAL_DATA_PATH="/app/.streamlit"

# Documentation of the container's network contract:
# 8501 = Streamlit UI, 11434 = Ollama API.
EXPOSE 8501
EXPOSE 11434

# Liveness probe against Streamlit's built-in health endpoint. A generous
# start period covers Ollama model loading before the UI is responsive.
HEALTHCHECK --interval=30s --timeout=5s --start-period=60s --retries=3 \
    CMD curl --fail http://localhost:8501/_stcore/health || exit 1

# start.sh is expected to launch ollama serve and streamlit; exec form so the
# script is PID 1 and receives SIGTERM from docker stop.
ENTRYPOINT ["/usr/local/bin/start.sh"]