# syntax=docker/dockerfile:1
# FastAPI + Ollama server for a Hugging Face Space (Docker SDK, port 7860)
# Use a lightweight Python base; tag is pinned (never :latest).
FROM python:3.10-slim

# All application files live under /app (created automatically).
WORKDIR /app

# curl is needed to fetch the Ollama install script and for the readiness
# probe in start.sh. update + install + cleanup in ONE layer so the apt
# cache never persists into the image (DL3009/DL3015).
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama via the official script.
# Run under bash with pipefail so a failed download aborts the build
# instead of being masked by the pipe (hadolint DL4006).
RUN bash -o pipefail -c 'curl -fsSL https://ollama.com/install.sh | sh'

# Python API dependencies; --no-cache-dir keeps the layer small (DL3042).
# NOTE(review): versions are unpinned — consider a pinned requirements.txt
# for reproducible builds.
RUN pip install --no-cache-dir fastapi uvicorn requests

# Copy the API code (expects api.py exposing `app`; add a .dockerignore
# to keep .git, caches, and local junk out of the build context).
COPY . .

# Generate start.sh: launch ollama in the background, wait until the
# server actually answers (instead of a blind `sleep 5`, which races the
# startup), pull the models, then exec uvicorn so it becomes PID 1 and
# receives SIGTERM from `docker stop`.
RUN printf '%s\n' \
        '#!/bin/bash' \
        'set -e' \
        'ollama serve &' \
        '# Wait up to 30s for the Ollama API to come up on its default port.' \
        'for i in $(seq 1 30); do' \
        '  curl -fsS http://127.0.0.1:11434/ >/dev/null 2>&1 && break' \
        '  sleep 1' \
        'done' \
        'ollama pull granite4:latest' \
        'ollama pull llama3.2:latest' \
        'ollama pull gemma3:latest' \
        'exec uvicorn api:app --host 0.0.0.0 --port 7860' \
        > start.sh \
    && chmod +x start.sh

# Hugging Face Spaces routes traffic to port 7860 (EXPOSE is documentation).
EXPOSE 7860

# Exec-form CMD: start.sh runs as PID 1 and execs into uvicorn.
CMD ["./start.sh"]