Adding Docker support — files added: Dockerfile (+58 lines), entrypoint.sh (+36 lines), requirements.txt (+27 lines)
Dockerfile
ADDED
|
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
FROM python:3.10-slim-buster

# Set the working directory
WORKDIR /app

# Copy the requirements file first so the dependency-install layer is
# cached independently of application-code changes.
COPY requirements.txt requirements.txt

# Update package list and install necessary packages in a single layer,
# then drop the apt cache/lists to keep the image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    build-essential \
    libffi-dev \
    cmake \
    libcurl4-openssl-dev \
    tini && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

# Create a virtual environment, upgrade pip, and install dependencies.
RUN python -m venv venv && \
    . /app/venv/bin/activate && \
    pip install --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Install Ollama. -f makes curl fail on HTTP errors instead of piping an
# error page into sh; -sS stays quiet but still reports failures; -L
# follows redirects.
RUN curl -fsSL https://ollama.ai/install.sh | sh

# Create the Ollama data directory with open permissions (the container
# may run as an arbitrary non-root UID, e.g. on hosted Spaces).
RUN mkdir -p /.ollama && chmod 777 /.ollama

# Put the virtualenv and the Ollama binary on the PATH.
ENV PATH="/app/venv/bin:/root/.ollama/bin:$PATH"

# Expose the service ports: Streamlit / hosting default (7860, 8501),
# Ollama API (11434), auxiliary API (1338).
EXPOSE 7860 11434 1338 8501

# Copy the entry point script and make it executable.
COPY entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh

# Comma-separated list of models for entrypoint.sh to pull; empty means
# "use the script's built-in default list".
# NOTE: entrypoint.sh reads MODEL (uppercase). The previous lowercase
# `ENV model="default_model"` was never consumed by anything.
ENV MODEL=""

# Copy the entire application
COPY . .

# Set proper permissions for the translations directory
RUN chmod -R 777 translations

# Set the entry point script as the default command.
# (tini is installed above; consider ENTRYPOINT ["tini","--","/entrypoint.sh"]
# for proper signal forwarding and zombie reaping.)
ENTRYPOINT ["/entrypoint.sh"]
|
entrypoint.sh
ADDED
|
@@ -0,0 +1,36 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#!/bin/bash
# Container entry point: start the Ollama server, pull the requested
# models, build the custom model from the Modelfile, then hand control
# to the Streamlit application.
set -u

# Source the virtual environment so python/pip/streamlit resolve to /app/venv.
source /app/venv/bin/activate

# Start the Ollama server in the background.
echo "Starting Ollama server"
ollama serve &

# Wait until the server actually answers instead of a fixed 1s sleep;
# give up after roughly 30 seconds and proceed (pulls will then fail loudly).
for _ in $(seq 1 30); do
  if ollama list >/dev/null 2>&1; then
    break
  fi
  sleep 1
done

# MODEL may hold a comma-separated list of models to pull; fall back to a
# built-in default when it is unset or empty. (${MODEL:-} keeps `set -u`
# from aborting when the variable is not exported at all.)
if [ -n "${MODEL:-}" ]; then
  # Split the MODEL variable into an array
  IFS=',' read -ra MODELS <<< "${MODEL}"
else
  # Use the default list of models
  MODELS=(llama3 ) #gemma:2b phi3 mistral
fi

# Pull each requested model, pausing briefly between pulls.
for m in "${MODELS[@]}"; do
  echo "Pulling $m"
  ollama pull "$m"
  sleep 5
done

# Build the application-specific model from the local Modelfile.
ollama create aws-path-learning -f ./Modelfile

# Replace this shell with the Streamlit app so PID 1 receives signals.
# FIX: the original ran `python streamlit run …`, which tells the Python
# interpreter to execute a script literally named "streamlit" (which does
# not exist in /app) and fails; `python -m streamlit` runs the installed
# package's CLI.
exec python -m streamlit run ./src/app.py
|
requirements.txt
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Python dependencies for the Dockerized app, installed into /app/venv
# by the Dockerfile's `pip install --no-cache-dir -r requirements.txt`.
# NOTE(review): only numpy/tornado/openai are pinned; every other entry
# floats to the latest release on each image build — consider pinning
# for reproducible builds.
websocket-client
python-dotenv
requests
tls-client
pypasser
names
colorama
curl_cffi
aiohttp
flask
flask_cors
flask-babel
streamlit
selenium
fake-useragent
twocaptcha
pydantic
pymailtm
Levenshtein
retrying
numpy>=1.22.2 # pinned to avoid a vulnerability
tornado>=6.3.2 # pinned to avoid a vulnerability
PyExecJS
openai==0.28.1
g4f
langchain-community
ollama