# syntax=docker/dockerfile:1
# NOTE(review): removed non-Dockerfile scrape residue (file size, blame
# hashes, line-number run) that would break `docker build`.
# Gradio app served on port 7860, backed by a local Ollama (mistral) model.
#
# Use an official Python runtime as the single base image.  The previous
# ubuntu:latest "builder" stage was never referenced by any COPY --from and
# has been removed; python:*-slim already ships pip, so installing
# python3-pip via apt is unnecessary.
FROM python:3.10-slim

# All application files live under /code.
WORKDIR /code

# curl is needed only to fetch the Ollama installer.  Combine update+install
# in one layer and remove the apt lists in the same layer so they do not
# persist in the image.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        ca-certificates \
        curl \
    && rm -rf /var/lib/apt/lists/*

# Install Ollama.
# NOTE(review): piping an unpinned installer to sh is not reproducible and
# not checksum-verified — consider pinning a release version/checksum.
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Ollama stores models/keys under /.ollama when HOME is unset (e.g. on
# Hugging Face Spaces the container may run as an arbitrary uid), so the
# directory must exist and be world-writable.
RUN mkdir -p /.ollama && chmod 777 /.ollama

# Install Python dependencies before copying the app so this layer stays
# cached until requirements.txt itself changes.
COPY requirements.txt /code/requirements.txt
RUN pip install --no-cache-dir -r requirements.txt

# Application code (copied after deps for better cache reuse).
COPY app.py /code/app.py

# Writable scratch directory used by the app at runtime.
RUN mkdir -p /code/sfbook && chmod 777 /code/sfbook

# Pre-pull the mistral model at build time: start the server in the
# background, wait for it to come up, pull the model, then stop the server.
# A bare `RUN ollama serve` blocks forever and the build never finishes;
# `RUN ollama run mistral` opens an interactive session and also hangs.
RUN ollama serve & \
    SERVE_PID=$!; \
    sleep 5; \
    ollama pull mistral; \
    kill $SERVE_PID

# Gradio listens on 7860 (EXPOSE is documentation only; it does not
# publish the port).
EXPOSE 7860

# Sample environment variable read by the app (key=value form; the legacy
# space-separated form is deprecated).
ENV NAME=World

# Start the Ollama server in the background, then exec the Gradio app so it
# becomes the container's foreground (PID 1) process.  The original file had
# three CMDs, of which only the last took effect — the server never started
# at runtime.
CMD ["/bin/sh", "-c", "ollama serve & exec python app.py"]