# syntax=docker/dockerfile:1

# Base: official Ollama image.
# NOTE(review): tag is explicit but still floating — pin to a specific
# version (or digest) for reproducible builds once one is chosen.
FROM ollama/ollama:latest

# Python runtime for the custom FastAPI wrapper around the Ollama API.
# update + install + list cleanup in ONE layer so the stale apt cache
# never persists into the image (hadolint DL3009/DL3015).
RUN apt-get update && apt-get install -y --no-install-recommends \
      python3 \
      python3-pip \
    && rm -rf /var/lib/apt/lists/*

# --no-cache-dir keeps pip's download cache out of the layer.
# NOTE(review): versions are unpinned — pin (e.g. fastapi==0.x.y) for
# reproducibility before shipping.
RUN pip3 install --no-cache-dir fastapi uvicorn requests

# Hugging Face Spaces persists /data across restarts; symlink Ollama's
# default model store (/root/.ollama) there so pulled models survive.
RUN mkdir -p /data/.ollama && ln -s /data/.ollama /root/.ollama

# Startup script — expected to launch `ollama serve` in the background
# and then the FastAPI server (see ENTRYPOINT below).
COPY startup.sh /startup.sh
RUN chmod +x /startup.sh

# The FastAPI application.
COPY app.py /app.py

# Documentation only (EXPOSE publishes nothing): Hugging Face Spaces
# routes external traffic to port 8000.
EXPOSE 8000

# BUG FIX: the original kept the base image's `ollama` ENTRYPOINT with
# CMD ["serve"] and tried to autostart /startup.sh by appending it to
# /etc/profile.d/ — but profile.d scripts run only for interactive
# *login* shells, which never happens at container start, so the
# FastAPI server was never launched. Run the startup script as the
# container entrypoint instead; it is responsible for starting both
# `ollama serve` and the uvicorn app.
# NOTE(review): assumes startup.sh starts ollama serve itself and ends
# by exec'ing the foreground process — confirm against startup.sh.
ENTRYPOINT ["/startup.sh"]