kmsmohamedansar commited on
Commit
48e2b49
·
verified ·
1 Parent(s): 23bef42

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -0
app.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+
3
+
4
+ # app.py (for Hugging Face Spaces deployment)
5
+
6
+ import streamlit as st
7
+ import pickle
8
+ import numpy as np
9
+ from sklearn.preprocessing import StandardScaler
10
+
11
# Load the model
def load_model():
    """Load the pickled Random Forest model from disk.

    Returns:
        The unpickled model, or None when "rf_model.pkl" does not exist.
        (main() checks ``if model:`` and reports "Model not loaded
        correctly." on a falsy value, so returning None — instead of
        letting FileNotFoundError propagate — makes that branch reachable.)

    NOTE(review): pickle.load executes arbitrary code from the file;
    only deploy this app with a trusted rf_model.pkl.
    """
    try:
        with open("rf_model.pkl", "rb") as f:
            return pickle.load(f)
    except FileNotFoundError:
        # Missing model file: signal failure to the caller rather than crash.
        return None
16
+
17
# Make predictions with the model
def predict(model, new_data, scaler=None):
    """Run *model* on *new_data*, optionally applying a pre-fitted scaler.

    Args:
        model: a fitted estimator exposing ``predict(X)``.
        new_data: 2-D feature array, shape (n_samples, n_features).
        scaler: an optional ALREADY-FITTED transformer (e.g. the
            StandardScaler fitted on the training data). Default None
            passes the features through unscaled.

    Returns:
        The model's predictions for *new_data*.

    BUG FIX: the original called ``StandardScaler().fit_transform(new_data)``
    on the single prediction row. With one sample the mean equals the
    sample itself (and zero variance is clamped to scale 1), so every
    feature was zeroed and the model always received the same input.
    A scaler must be fitted on the training data, never at predict time.
    """
    if scaler is not None:
        new_data = scaler.transform(new_data)
    return model.predict(new_data)
22
+
23
# Streamlit app UI
def main():
    """Render the prediction form and show the model's output on demand.

    Collects five numeric features, and only when the user presses
    "Predict" loads the pickled model and displays its prediction.
    """
    st.title('Random Forest Prediction App')

    st.write("This app uses a pre-trained Random Forest model to make predictions.")

    # Input data
    feature1 = st.number_input("Feature 1", min_value=0.0, max_value=10.0, value=5.0)
    feature2 = st.number_input("Feature 2", min_value=0.0, max_value=10.0, value=3.0)
    feature3 = st.number_input("Feature 3", min_value=0.0, max_value=10.0, value=7.0)
    feature4 = st.number_input("Feature 4", min_value=0.0, max_value=10.0, value=6.0)
    feature5 = st.number_input("Feature 5", min_value=0.0, max_value=10.0, value=4.0)

    # Prepare input data as a single-row 2-D array for the model.
    new_data = np.array([[feature1, feature2, feature3, feature4, feature5]])

    if st.button('Predict'):
        # Load lazily: Streamlit reruns this script on every widget
        # interaction, so unpickling outside the button (as the original
        # did) re-read the model on each rerun and crashed the whole form
        # when the file was missing.
        try:
            model = load_model()
        except (OSError, pickle.UnpicklingError):
            model = None
        if model:
            predictions = predict(model, new_data)
            st.write(f"Predictions: {predictions}")
        else:
            st.write("Model not loaded correctly.")
47
+
48
# Script entry point: run the app only when executed directly, not on import.
if __name__ == '__main__':
    main()