# syntax=docker/dockerfile:1
# chatbot / Dockerfile
# (origin: Krish-05's Hugging Face Space, commit b2b9a16)
# Stage 1: Prepare Ollama with the model
# Uses the official Ollama image as the base for this stage, which includes Ollama pre-installed.
# NOTE(review): the tag is unpinned (implicitly :latest), so builds are not reproducible —
# pin a specific version or digest once a known-good release is confirmed.
FROM ollama/ollama AS ollama-builder

# Keep Ollama's data (models, keys, server log) in a predictable location for this stage.
ENV OLLAMA_HOME="/root/.ollama"
RUN mkdir -p ${OLLAMA_HOME}

# Start the Ollama server in the background, wait until its API actually answers
# (polling `ollama list` is more reliable than a fixed sleep: it neither races a
# slow startup nor wastes time on a fast one), then pull the model.
# /usr/bin/ollama is the binary location in the official image.
# On pull failure, dump the server log so the build error is diagnosable.
RUN bash -c "/usr/bin/ollama serve > \"${OLLAMA_HOME}/ollama_build.log\" 2>&1 & \
    for i in \$(seq 1 30); do /usr/bin/ollama list >/dev/null 2>&1 && break; sleep 1; done && \
    /usr/bin/ollama pull krishna_choudhary/AI_Assistant_Chatbot || \
    (cat \"${OLLAMA_HOME}/ollama_build.log\" && exit 1)"
# Stage 2: Build the final application image with Streamlit and pre-configured Ollama
# Lightweight Ubuntu base image for the final application.
FROM ubuntu:22.04

# Create a non-root user and group for running the application — crucial for
# security and to avoid permission issues at runtime on Hugging Face Spaces.
# FIX: the original used `useradd -r` (system account) together with -u 1000,
# which is contradictory (system UIDs are < 1000) and does not create a home
# directory, forcing a separate mkdir/chown layer. --create-home produces
# /home/appuser with the correct ownership in one step, so Ollama (or anything
# else) can write under the user's home directory.
ARG UID=1000
ARG GID=1000
RUN groupadd --gid ${GID} appgroup \
    && useradd --uid ${UID} --gid appgroup --create-home appuser
# Set the working directory inside the container (WORKDIR creates it if missing).
WORKDIR /app

# Install system dependencies for Streamlit (Python, pip) and general utilities.
# BUG FIX: Ubuntu 22.04 (jammy) does not ship a `python3.9` package, so
# `apt-get install python3.9` fails the build; use the distro default `python3`
# (3.10) instead. Packages are sorted for diffability; --no-install-recommends
# keeps the image smaller; apt lists are removed in the same layer so they never
# persist in the image. DEBIAN_FRONTEND is set inline (not via ENV) so it does
# not leak into the runtime environment.
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        git \
        python3 \
        python3-pip \
        software-properties-common \
    && apt-get clean \
    && rm -rf /var/lib/apt/lists/*
# Copy the requirements manifest before the source code so the dependency layer
# is only rebuilt when requirements.txt changes, not on every source edit.
# --chown makes the files belong to the runtime user immediately instead of
# leaving root-owned copies for a later recursive chown to duplicate in a new layer.
COPY --chown=appuser:appgroup requirements.txt ./
RUN pip3 install --no-cache-dir -r /app/requirements.txt

# Copy the Streamlit application source code into the container.
COPY --chown=appuser:appgroup src/ ./src/
# --- Copy Ollama binary and model data from the builder stage ---
# The Ollama executable lives at /usr/bin/ollama in the official image.
COPY --from=ollama-builder /usr/bin/ollama /usr/local/bin/ollama

# Point Ollama's runtime data at a writable location within /app.
# NOTE(review): Ollama's documented override for the model directory is
# OLLAMA_MODELS (and it otherwise defaults to $HOME/.ollama); confirm that
# start.sh actually honors OLLAMA_HOME, or export OLLAMA_MODELS there as well.
ENV OLLAMA_HOME="/app/.ollama"

# BUG FIX: the model pulled in stage 1 was never copied into this image, so the
# whole builder stage was wasted and the model would have to be re-downloaded at
# every container start. Stage 1's server runs as root, so its pull landed in
# /root/.ollama; copy that store into the runtime location, owned by appuser.
COPY --from=ollama-builder --chown=appuser:appgroup /root/.ollama /app/.ollama
# Copy the startup script and make it executable in one instruction:
# COPY --chmod avoids the extra layer (and doubled file bytes) that a separate
# RUN chmod would add. This must happen *before* switching to the non-root
# user, as writing to /usr/local/bin requires root permissions.
COPY --chmod=755 start.sh /usr/local/bin/start.sh

# Give appuser full ownership of /app — critical so Streamlit, Ollama's runtime
# data, and the application code are all readable/writable at runtime.
RUN chown -R appuser:appgroup /app
# Switch to the non-root user: everything from here on, including the
# ENTRYPOINT, runs as appuser (security best practice).
USER appuser

# --- Streamlit configuration for Docker stability ---
# A single ENV instruction groups the related settings in one layer:
#  - usage statistics disabled
#  - file watcher disabled (better stability in Docker, especially on Spaces)
#  - listen on 8501, the port Hugging Face Spaces exposes for Streamlit
#  - bind to all interfaces so the port is reachable from outside the container
#  - keep Streamlit's data/cache directory inside the writable /app tree
# No explicit mkdir is needed for /app/.streamlit: Streamlit creates it on
# demand, and /app is owned by appuser.
ENV STREAMLIT_SERVER_BROWSER_GATHER_USAGE_STATS="false" \
    STREAMLIT_SERVER_FILE_WATCHER_TYPE="none" \
    STREAMLIT_SERVER_PORT="8501" \
    STREAMLIT_SERVER_ADDRESS="0.0.0.0" \
    STREAMLIT_GLOBAL_DATA_PATH="/app/.streamlit"
# Document the service ports (EXPOSE does not publish them; it is a contract
# for operators and tooling): 8501 for Streamlit, 11434 for the Ollama API.
EXPOSE 8501
EXPOSE 11434

# Healthcheck against Streamlit's health endpoint so Hugging Face Spaces can
# detect a wedged container. FIX: the original had no --start-period, so the
# default retry budget could be exhausted while Streamlit (and Ollama's model
# load) were still booting, marking a healthy app as dead; the generous
# start-period lets startup failures go unpunished.
HEALTHCHECK --interval=30s --timeout=5s --start-period=120s --retries=3 \
  CMD curl --fail http://localhost:8501/_stcore/health || exit 1

# Exec-form entrypoint: the startup script becomes the container's main process.
ENTRYPOINT ["/usr/local/bin/start.sh"]