ok
Browse files- Dockerfile +3 -1
- S3_user_accessKeys.csv +0 -4
- app.py +43 -5
Dockerfile
CHANGED
|
@@ -47,4 +47,6 @@ ENV ARTIFACT_STORE_URI=$ARTIFACT_STORE_URI
|
|
| 47 |
CMD mlflow server -p $PORT \
|
| 48 |
--host 0.0.0.0 \
|
| 49 |
--backend-store-uri $BACKEND_STORE_URI \
|
| 50 |
-
--default-artifact-root $ARTIFACT_STORE_URI
|
|
|
|
|
|
|
|
|
# Launch the MLflow tracking server.
# $PORT, $BACKEND_STORE_URI, $ARTIFACT_STORE_URI and $ML_SPACE are expected to
# be provided by ENV/ARG instructions earlier in this Dockerfile.
# --allowed-hosts / --cors-allowed-origins restrict which origins may reach the
# server (here: the Hugging Face Space URL).
CMD mlflow server -p $PORT \
    --host 0.0.0.0 \
    --backend-store-uri $BACKEND_STORE_URI \
    --default-artifact-root $ARTIFACT_STORE_URI \
    --allowed-hosts $ML_SPACE \
    --cors-allowed-origins $ML_SPACE
|
S3_user_accessKeys.csv
DELETED
|
@@ -1,4 +0,0 @@
|
|
| 1 |
-
Access key ID,Secret access key
|
| 2 |
-
AKIA********REDACTED,****************REDACTED  (live AWS key pair was committed; deleting the file does NOT remove it from git history — deactivate/rotate this key in IAM immediately)
|
| 3 |
-
BACKEND_STORE_URI : psql 'postgresql://neondb_owner:********REDACTED@ep-old-shadow-abmcscsp-pooler.eu-west-2.aws.neon.tech/neondb?sslmode=require&channel_binding=require'  (database password was committed and remains in git history — rotate it in Neon immediately)
|
| 4 |
-
ARTIFACT_STORE_URI : s3://mlflow-bucket-36-yeah/mlfow/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
app.py
CHANGED
|
@@ -1,7 +1,45 @@
|
|
| 1 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
|
| 3 |
-
|
|
|
|
| 4 |
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Train a logistic-regression classifier on the Iris dataset and log the run
to a remote MLflow tracking server via sklearn autologging.

The tracking URI defaults to the original Hugging Face Space but can be
overridden through the MLFLOW_TRACKING_URI environment variable.
"""
import mlflow
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
import os

# Experiment configuration. Reading the URI from the environment (with the
# original value as default) keeps the script deployment-agnostic and finally
# puts the previously unused `os` import to work.
EXPERIMENT_NAME = "iris-classification"
TRACKING_URI = os.environ.get("MLFLOW_TRACKING_URI", "https://alvlt-test.hf.space")

# Load the Iris dataset into a feature frame X and a target series y.
iris = load_iris()
X = pd.DataFrame(data=iris["data"], columns=iris["feature_names"])
y = pd.Series(data=iris["target"], name="target")

# Hold out a test split (sklearn default: 25% test). NOTE(review): no fixed
# random_state, so the split — and the logged accuracy — varies between runs;
# pass random_state=... here if reproducibility is needed.
X_train, X_test, y_train, y_test = train_test_split(X, y)

# Point MLflow at the tracking server and select (or create) the experiment.
mlflow.set_tracking_uri(TRACKING_URI)
mlflow.set_experiment(EXPERIMENT_NAME)

# Autolog records params, metrics and the fitted model during .fit(),
# so no explicit start_run()/log_model() calls are needed below.
mlflow.sklearn.autolog()

# Instantiate and fit the model. `.values` strips pandas column labels so the
# model is trained on plain ndarrays, matching how it is scored below.
lr = LogisticRegression()
lr.fit(X_train.values, y_train.values)

# Evaluate on the held-out split. (The original also stored lr.predict(...)
# in an unused variable and fetched the experiment object without using it;
# that dead code has been removed.)
accuracy = lr.score(X_test.values, y_test.values)

# mlflow.sklearn.log_model(lr, "iris-log-reg")  # redundant: autolog logs the model

# Print results
print("LogisticRegression model")
print("Accuracy: {}".format(accuracy))