# syntax=docker/dockerfile:1
#
# Gradio + Ollama (mistral) application image.
# Single stage: the removed `FROM ubuntu:latest` "builder" was never
# referenced by any COPY --from and contributed nothing to the build.
FROM python:3.10-slim

# All application files live under /code.
WORKDIR /code

# curl is needed to fetch the Ollama installer; clean the apt lists in the
# same layer so they do not bloat the image. (python:3.10-slim already ships
# pip, so the original `apt-get install python3-pip` was redundant; the
# systemd/init install was useless inside a container and is dropped.)
RUN apt-get update && apt-get install -y --no-install-recommends \
      ca-certificates \
      curl \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama.
# NOTE(review): piping an unpinned remote script to sh is not reproducible;
# consider pinning a released tarball with ADD --checksum instead.
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Ollama stores models/config under /.ollama when HOME is unset; make it
# writable so platforms that run the container as an arbitrary non-root UID
# (e.g. Hugging Face Spaces) can use it.
RUN mkdir -p /.ollama && chmod 777 /.ollama

# Install Python dependencies before copying the app so this layer stays
# cached until requirements.txt changes.
COPY requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Application code and its writable data directory.
COPY app.py /code/app.py
RUN mkdir -p /code/sfbook && chmod 777 /code/sfbook

# Gradio listens on 7860. EXPOSE is documentation only; publish with
# `docker run -p`. (The duplicate EXPOSE 7860 and the unused EXPOSE 80 /
# `ENV NAME World` from the original are removed.)
EXPOSE 7860

# Start the Ollama server in the background, warm the mistral model, then
# exec the app as the foreground process (PID 1 via exec so it receives
# SIGTERM). The original `RUN ollama serve` would hang the build forever —
# the server must be started at run time — and only the last of the three
# original CMDs would ever have executed.
CMD ["/bin/sh", "-c", "ollama serve & sleep 5 && ollama pull mistral && exec python app.py"]