HyraXuna committed on
Commit
cd47910
·
1 Parent(s): a4ac3df

my first commit

Browse files
Files changed (2) hide show
  1. Dockerfile +46 -0
  2. requirements.txt +10 -0
Dockerfile ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# Build from a LINUX lightweight version of Anaconda.
FROM continuumio/miniconda3

# Install nano, unzip and curl in a SINGLE layer: splitting `apt-get update`
# and `apt-get install` into separate RUNs lets Docker cache a stale package
# index, which breaks rebuilds later. Clean the apt lists afterwards to keep
# the image small.
RUN apt-get update && \
    apt-get install -y --no-install-recommends nano unzip curl && \
    rm -rf /var/lib/apt/lists/*

# Install the AWS CLI v2 — necessary since we are going to interact with S3.
# Download, unzip, install and remove the archive + extracted installer in one
# layer so none of the intermediate files bloat the image.
RUN curl "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o "awscliv2.zip" && \
    unzip awscliv2.zip && \
    ./aws/install && \
    rm -rf awscliv2.zip aws

# THIS IS SPECIFIC TO HUGGING FACE
# Create a new user named "user" with UID 1000 (the UID Spaces containers
# run as), then switch from "root" (default user in an image) to "user".
RUN useradd -m -u 1000 user
USER user

# Set two environment variables:
# - HOME so we can give ownership to all files in there afterwards;
# - PATH extended with /home/user/.local/bin, where pip installs console
#   scripts for a non-root user, so Linux finds binaries installed as "user".
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Set the working directory to $HOME/app (<=> /home/user/app).
WORKDIR $HOME/app

# Copy and install dependencies before the application code so this layer
# stays cached when only source files change. --no-cache-dir avoids keeping
# pip's download cache inside the image.
COPY requirements.txt /requirements.txt
RUN pip install --no-cache-dir -r /requirements.txt

# Copy all local files to /home/user/app with "user" as owner of these files.
# Always use --chown=user on Hugging Face to avoid permission errors.
COPY --chown=user . $HOME/app

# Launch the MLflow tracking server.
# $PORT is taken from the environment rather than hard-coded (7860 on Spaces):
# using env variables ($PORT, $BACKEND_STORE_URI, $ARTIFACT_STORE_URI) keeps
# the image portable if we were to deploy to another type of server.
CMD mlflow server -p $PORT \
    --host 0.0.0.0 \
    --backend-store-uri $BACKEND_STORE_URI \
    --default-artifact-root $ARTIFACT_STORE_URI
requirements.txt ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
# Python dependencies for the MLflow tracking-server image (see Dockerfile).
# S3 access from Python (complements the AWS CLI installed in the image)
boto3
# Data handling
pandas
# WSGI server — presumably used to serve an app in production; confirm usage
gunicorn
# Dashboard / UI framework
streamlit
# Machine learning
scikit-learn
# Plotting / visualization
matplotlib
seaborn
plotly
# Experiment tracking server launched by the image's CMD
mlflow
# PostgreSQL driver — needed when BACKEND_STORE_URI points at Postgres
psycopg2-binary