# Base image for running Ollama plus a small Python proxy app.
FROM ubuntu:22.04

# System packages:
#   curl/ca-certificates — fetch the Ollama installer and poll its HTTP API
#   zstd                 — decompression support
#   python3/pip          — run proxy.py and its requirements
# --no-install-recommends keeps the layer small; the apt list cleanup in the
# same RUN prevents stale indexes from being baked into the image.
RUN DEBIAN_FRONTEND=noninteractive apt-get update && \
    apt-get install -y --no-install-recommends \
        curl \
        ca-certificates \
        zstd \
        python3 \
        python3-pip \
    && rm -rf /var/lib/apt/lists/*
|
|
# Install the Ollama server binary via the official installer script.
# NOTE(review): curl | sh executes remote code unpinned — acceptable for this
# image, but consider pinning a release tarball for reproducible builds.
RUN curl -fsSL https://ollama.ai/install.sh | sh
|
|
# Install Python dependencies for the proxy before copying app code, so this
# layer is cached unless requirements.txt itself changes.
COPY requirements.txt /tmp/requirements.txt
RUN pip3 install --no-cache-dir -r /tmp/requirements.txt
|
|
# Run the app as an unprivileged user (UID 1000, common convention for
# hosted container platforms such as HF Spaces).
RUN useradd -m -u 1000 user
USER user

# OLLAMA_HOST   — bind the server to loopback only; proxy.py fronts it publicly
# OLLAMA_MODELS — keep model blobs under the unprivileged user's home
ENV HOME=/home/user \
    PATH="/home/user/.local/bin:$PATH" \
    OLLAMA_HOST=127.0.0.1:11434 \
    OLLAMA_MODELS=/home/user/.ollama/models

WORKDIR $HOME/app
|
|
# App payload: startup script and the HTTP proxy. --chown makes the files
# owned by the runtime user so the chmod below works without root.
COPY --chown=user entrypoint.sh .
COPY --chown=user proxy.py .
RUN chmod +x entrypoint.sh
|
|
# Pre-pull the model at build time so the container starts without a cold
# download. Needs root to write system paths the installer set up, but the
# model store itself lives under /home/user (OLLAMA_MODELS), so ownership is
# fixed up afterwards — otherwise the runtime 'user' ends up with root-owned
# files it cannot manage.
USER root
RUN ollama serve & \
    SERVE_PID=$!; \
    # Poll the API until the server is actually ready (a fixed sleep races
    # against server startup and fails flakily); give up after ~30s.
    tries=0; \
    until curl -fsS "http://${OLLAMA_HOST}/api/version" >/dev/null 2>&1; do \
        tries=$((tries + 1)); \
        if [ "$tries" -gt 30 ]; then \
            echo 'ollama server failed to start' >&2; \
            kill "$SERVE_PID" 2>/dev/null; \
            exit 1; \
        fi; \
        sleep 1; \
    done && \
    ollama pull granite4:350m && \
    # Stop exactly the server we started (not a blanket pkill that could
    # mask failures), then hand the model store to the runtime user.
    kill "$SERVE_PID" && \
    chown -R user:user /home/user/.ollama
USER user
|
|
# Public port served by proxy.py (Ollama itself stays on loopback :11434).
EXPOSE 7860

# entrypoint.sh is expected to start the Ollama server and the proxy.
CMD ["./entrypoint.sh"]