Update app.py
Browse files
app.py
CHANGED
|
@@ -1,9 +1,4 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
# app.py (for Hugging Face Spaces deployment)
|
| 5 |
-
|
| 6 |
-
import streamlit as st
|
| 7 |
import pickle
|
| 8 |
import numpy as np
|
| 9 |
from sklearn.preprocessing import StandardScaler
|
|
@@ -15,35 +10,28 @@ def load_model():
|
|
| 15 |
return model
|
| 16 |
|
| 17 |
# Make predictions with the model
|
| 18 |
-
def predict(model, new_data):
|
|
|
|
| 19 |
scaler = StandardScaler()
|
| 20 |
new_data_scaled = scaler.fit_transform(new_data) # Standardize the data
|
| 21 |
-
return model.predict(new_data_scaled)
|
| 22 |
|
| 23 |
-
#
|
| 24 |
def main():
|
| 25 |
-
st.title('Random Forest Prediction App')
|
| 26 |
-
|
| 27 |
-
st.write("This app uses a pre-trained Random Forest model to make predictions.")
|
| 28 |
-
|
| 29 |
-
# Input data
|
| 30 |
-
feature1 = st.number_input("Feature 1", min_value=0.0, max_value=10.0, value=5.0)
|
| 31 |
-
feature2 = st.number_input("Feature 2", min_value=0.0, max_value=10.0, value=3.0)
|
| 32 |
-
feature3 = st.number_input("Feature 3", min_value=0.0, max_value=10.0, value=7.0)
|
| 33 |
-
feature4 = st.number_input("Feature 4", min_value=0.0, max_value=10.0, value=6.0)
|
| 34 |
-
feature5 = st.number_input("Feature 5", min_value=0.0, max_value=10.0, value=4.0)
|
| 35 |
-
|
| 36 |
-
# Prepare input data
|
| 37 |
-
new_data = np.array([[feature1, feature2, feature3, feature4, feature5]])
|
| 38 |
-
|
| 39 |
-
# Load the model and make predictions
|
| 40 |
model = load_model()
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 47 |
|
| 48 |
if __name__ == '__main__':
|
| 49 |
main()
|
|
|
|
| 1 |
+
import gradio as gr
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2 |
import pickle
|
| 3 |
import numpy as np
|
| 4 |
from sklearn.preprocessing import StandardScaler
|
|
|
|
| 10 |
return model
|
| 11 |
|
| 12 |
# Make predictions with the model
|
| 13 |
+
def predict(model, feature1, feature2, feature3, feature4, feature5, scaler=None):
    """Return the model's prediction for one sample of five features.

    Parameters
    ----------
    model : fitted estimator exposing ``predict``.
    feature1..feature5 : float
        Raw input feature values for a single sample.
    scaler : optional
        A *fitted* transformer exposing ``transform`` (e.g. the
        StandardScaler fitted on the training data). If given, it is used
        to standardize the sample before prediction.

    Returns
    -------
    The first (only) element of ``model.predict`` for this sample.
    """
    new_data = np.array([[feature1, feature2, feature3, feature4, feature5]])
    if scaler is not None:
        # Correct path: reuse the scaler that was fitted on the training data.
        new_data_scaled = scaler.transform(new_data)
    else:
        # BUG (kept for backward compatibility): fitting a fresh scaler on a
        # single row maps every feature to 0 (mean subtraction, zero variance),
        # so the model always sees the same input regardless of the sliders.
        # Pass the training-time `scaler` to get meaningful predictions.
        new_data_scaled = StandardScaler().fit_transform(new_data)
    return model.predict(new_data_scaled)[0]
|
| 18 |
|
| 19 |
+
# Gradio Interface
|
| 20 |
def main():
    """Build and launch the Gradio UI for the pre-trained Random Forest model.

    Creates one slider per model feature, wires them through ``predict``
    (with the model pre-bound), and serves the interface via ``launch()``.
    """
    model = load_model()

    # One slider per feature; defaults mirror the original app's values.
    # NOTE: `gr.inputs.Slider` / `gr.outputs.Textbox` and the `default=` kwarg
    # were removed in Gradio 3.0 — the modern API is `gr.Slider(value=...)`.
    inputs = [
        gr.Slider(minimum=0, maximum=10, value=5, label="Feature 1"),
        gr.Slider(minimum=0, maximum=10, value=3, label="Feature 2"),
        gr.Slider(minimum=0, maximum=10, value=7, label="Feature 3"),
        gr.Slider(minimum=0, maximum=10, value=6, label="Feature 4"),
        gr.Slider(minimum=0, maximum=10, value=4, label="Feature 5"),
    ]
    output = gr.Textbox(label="Prediction")

    gr.Interface(
        fn=lambda *features: predict(model, *features),
        inputs=inputs,
        outputs=output,
        title="Random Forest Prediction App",
        description="This app uses a pre-trained Random Forest model to make predictions.",
        live=True,
    ).launch()


if __name__ == '__main__':
    main()
|