# syntax=docker/dockerfile:1
# ============================================================
# CCPA Compliance Analyzer
# Base: NVIDIA CUDA 12.1 + Ubuntu 22.04 for GPU support
# Falls back gracefully to CPU if no GPU is available
# ============================================================
FROM nvidia/cuda:12.1.0-base-ubuntu22.04

# ── Build args ───────────────────────────────────────────────
# MODEL_NAME: Ollama model tag that gets pre-pulled into the image below.
ARG MODEL_NAME=llama3.2:3b
# Build-time only (ARG, not ENV): silences apt prompts during the build
# without leaking DEBIAN_FRONTEND into the runtime environment.
ARG DEBIAN_FRONTEND=noninteractive
# ── System dependencies ──────────────────────────────────────
# update + install in ONE layer (separate layers risk a stale apt cache);
# --no-install-recommends keeps the image minimal; the apt lists are
# removed in the same layer so they never persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
        python3-pip \
        python3.11 \
        python3.11-dev \
    && rm -rf /var/lib/apt/lists/*

# Make python3.11 the default `python3` and `python` interpreter.
RUN update-alternatives --install /usr/bin/python3 python3 /usr/bin/python3.11 1 && \
    update-alternatives --install /usr/bin/python python /usr/bin/python3.11 1
# ── Install Ollama ───────────────────────────────────────────
# Switch the RUN shell to bash with pipefail first: under the default
# `/bin/sh -c`, a failed curl would be masked by `sh` exiting 0
# (hadolint DL4006) and we would bake a broken install into the image.
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN curl -fsSL https://ollama.com/install.sh | sh
# ── Working directory ────────────────────────────────────────
WORKDIR /app

# ── Python dependencies ──────────────────────────────────────
# requirements.txt is copied on its own so this install layer stays
# cached until the dependency list itself changes; --no-cache-dir keeps
# pip's download cache out of the image.
COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt
# ── Application code ─────────────────────────────────────────
# Copied after the dependency install so source edits don't invalidate
# the cached pip layer; one COPY keeps the layer count down.
COPY app.py ccpa_knowledge.py ./
# ── Pre-download model weights into the image ────────────────
# The Ollama daemon must be running to pull a model, so everything happens
# in ONE RUN layer: start the daemon in the background, wait for its API,
# pull the model, then shut the daemon down. Keeping it in one layer means
# the weights are committed and no stray server process leaks state.
# OLLAMA_MODELS pins a consistent path for the weights.
ENV OLLAMA_MODELS=/root/.ollama/models
RUN ollama serve & \
    SERVER_PID=$! && \
    echo "Waiting for Ollama daemon..." && \
    timeout 60 sh -c 'until curl -sf http://localhost:11434/api/tags; do sleep 2; done' && \
    echo "Pulling model: ${MODEL_NAME}" && \
    ollama pull "${MODEL_NAME}" && \
    echo "Model pull complete." && \
    kill "$SERVER_PID" && \
    wait "$SERVER_PID" 2>/dev/null || true
# ── Startup script ───────────────────────────────────────────
# --chmod sets the execute bit at copy time (BuildKit), which avoids a
# second layer for `RUN chmod` that would duplicate the file's bytes.
COPY --chmod=0755 start.sh .

# ── Runtime environment ──────────────────────────────────────
# MODEL_NAME re-exports the build ARG so start.sh knows which model to run;
# PYTHONUNBUFFERED makes app logs stream immediately to `docker logs`.
ENV MODEL_NAME=${MODEL_NAME} \
    OLLAMA_HOST=http://localhost:11434 \
    PYTHONUNBUFFERED=1
# ── Port ─────────────────────────────────────────────────────
# EXPOSE is documentation only; publish with `docker run -p 8000:8000`.
EXPOSE 8000

# ── Health check ─────────────────────────────────────────────
# Generous 120s start period: the model has to load before /health responds.
HEALTHCHECK --interval=15s --timeout=10s --start-period=120s --retries=5 \
    CMD curl --silent --fail http://localhost:8000/health || exit 1

# ── Entrypoint ───────────────────────────────────────────────
# Exec form so start.sh runs as PID 1 and receives SIGTERM from `docker stop`.
CMD ["./start.sh"]
# NOTE(review): this second FROM begins a new build stage, so as written the
# final image is ONLY this Alpine stage — everything built in the CUDA stage
# above is discarded. If these are meant to be two separate images, move this
# section into its own Dockerfile.
# Pin the base tag instead of `latest` so builds are reproducible (DL3007).
FROM alpine:3.20
# Install unzip since Docker does not natively support extracting .zip files
# with ADD; --no-cache skips writing the apk index at all, so no separate
# cache-cleanup step is needed (DL3018).
RUN apk add --no-cache unzip
# Set the working directory inside the container
WORKDIR /app
# Copy the local zip file into the container
COPY app.zip .
# Unzip and delete the archive in the SAME layer so it never persists
# in the image (a later `rm` would not shrink earlier layers).
RUN unzip app.zip && rm app.zip
# Define the command to run when the container starts (replace with your application's start command)
CMD ["your_app_start_command"]