ABCABCABC999 committed on
Commit
7a2e552
·
verified ·
1 Parent(s): 415bb19

Upload Flask backend files

Browse files
Files changed (3) hide show
  1. Dockerfile +3 -10
  2. app.py +61 -51
  3. requirements.txt +7 -2
Dockerfile CHANGED
@@ -1,10 +1,3 @@
1
- # Dockerfile for SuperKart Streamlit Frontend
2
- FROM python:3.10-slim
3
-
4
- WORKDIR /app
5
-
6
- COPY . .
7
-
8
- RUN pip install --no-cache-dir -r requirements.txt
9
-
10
- CMD ["streamlit", "run", "app.py", "--server.port=7861", "--server.address=0.0.0.0"]
 
1
+ # Backend
2
+ #docker build -t superkart-backend .
3
+ #docker run -p 7860:7860 superkart-backend
 
 
 
 
 
 
 
app.py CHANGED
@@ -1,52 +1,62 @@
1
- import streamlit as st
2
  import pandas as pd
3
- import requests
4
-
5
- st.title("πŸ›’ SuperKart Quarterly Sales Predictor")
6
-
7
- # Input form
8
- st.subheader("πŸ” Predict Store's Quarterly Sales")
9
-
10
- store_id = st.selectbox("Store ID", ["OUT001", "OUT002", "OUT003", "OUT004"])
11
- product_type = st.selectbox("Product Type", ["Dairy", "Soft Drinks", "Meat", "Canned", "Frozen Foods"])
12
- sugar_content = st.selectbox("Product Sugar Content", ["Low", "Medium", "High", "No Added Sugar"])
13
- store_type = st.selectbox("Store Type", ["Supermarket Type1", "Supermarket Type2", "Grocery Store", "Food Mart"])
14
- city_type = st.selectbox("City Type", ["Urban", "Semi-Urban", "Rural"])
15
- store_size = st.selectbox("Store Size", ["Small", "Medium", "High"])
16
-
17
- est_year = st.number_input("Store Establishment Year", min_value=1980, max_value=2025, value=2005)
18
- weight = st.number_input("Product Weight", min_value=0.0, value=12.0)
19
- area = st.number_input("Product Allocated Area", min_value=0.0, value=125.0)
20
- mrp = st.number_input("Product MRP", min_value=0.0, value=120.0)
21
-
22
- input_data = pd.DataFrame([{
23
- 'Store_Id': store_id,
24
- 'Product_Type': product_type,
25
- 'Product_Sugar_Content': sugar_content,
26
- 'Store_Type': store_type,
27
- 'Store_Location_City_Type': city_type,
28
- 'Store_Size': store_size,
29
- 'Store_Establishment_Year': est_year,
30
- 'Product_Weight': weight,
31
- 'Product_Allocated_Area': area,
32
- 'Product_MRP': mrp
33
- }])
34
-
35
- if st.button("Predict Sales"):
36
- api_url = "https://<your-backend-space>.hf.space/v1/storesales"
37
- response = requests.post(api_url, json=input_data.to_dict(orient='records'))
38
- if response.status_code == 200:
39
- result = response.json()
40
- st.success(f"πŸ“¦ Predicted Total Sales: β‚Ή{result['Total_Store_Sales']:,.2f}")
41
- else:
42
- st.error(f"❌ API Error: {response.text}")
43
-
44
- st.subheader("πŸ“ Batch Prediction via CSV")
45
- file = st.file_uploader("Upload CSV", type=["csv"])
46
-
47
- if file and st.button("Predict Batch"):
48
- response = requests.post("https://<your-backend-space>.hf.space/v1/storesalesbatch", files={"file": file})
49
- if response.status_code == 200:
50
- st.write(pd.DataFrame(response.json()))
51
- else:
52
- st.error(f"❌ Batch API Error: {response.text}")
 
 
 
 
 
 
 
 
 
 
 
1
+ import joblib
2
  import pandas as pd
3
+ from flask import Flask, request, jsonify
4
+ from waitress import serve
5
+
6
+ app = Flask("SuperKart_Quarterly_Sales_Predictor")
7
+
8
+ # Load model
9
+ model = joblib.load("deployment_files/quarterly_sales_prediction_model_v1_0.joblib")
10
+
11
+ @app.get('/')
12
+ def home():
13
+ return "βœ… SuperKart API is live"
14
+
15
+ @app.post('/v1/storesales')
16
+ def predict_sales():
17
+ try:
18
+ data = request.get_json()
19
+ df = pd.DataFrame(data)
20
+
21
+ feature_cols = [
22
+ 'Product_Weight', 'Product_Allocated_Area',
23
+ 'Product_MRP', 'Product_Sugar_Content', 'Product_Type',
24
+ 'Store_Establishment_Year', 'Store_Id', 'Store_Type',
25
+ 'Store_Size', 'Store_Location_City_Type'
26
+ ]
27
+ # 'Product_Sugar_Content', 'Product_Type',
28
+ predictions = model.predict(df[feature_cols])
29
+ total_sales = round(predictions.sum(), 2)
30
+
31
+ return jsonify({'Total_Store_Sales': total_sales})
32
+ except Exception as e:
33
+ return jsonify({'error': str(e)}), 500
34
+
35
+ @app.post('/v1/storesalesbatch')
36
+ def predict_batch():
37
+ try:
38
+ file = request.files['file']
39
+ df = pd.read_csv(file)
40
+
41
+ feature_cols = [
42
+ 'Product_Weight', 'Product_Allocated_Area',
43
+ 'Product_MRP', 'Product_Sugar_Content', 'Product_Type',
44
+ 'Store_Establishment_Year', 'Store_Id', 'Store_Type',
45
+ 'Store_Size', 'Store_Location_City_Type'
46
+ ]
47
+
48
+ df['Predicted_Sales'] = model.predict(df[feature_cols])
49
+ summary = (
50
+ df.groupby('Store_Id')['Predicted_Sales']
51
+ .sum()
52
+ .round(2)
53
+ .reset_index()
54
+ .rename(columns={'Predicted_Sales': 'Total_Sales'})
55
+ )
56
+
57
+ return summary.to_dict(orient="records")
58
+ except Exception as e:
59
+ return jsonify({'error': str(e)}), 500
60
+
61
+ if __name__ == '__main__':
62
+ serve(app, host="0.0.0.0", port=7860)
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- streamlit==1.34.0
 
2
  pandas==2.2.2
3
- requests==2.31.0
 
 
 
 
 
1
+ Flask==2.2.2
2
+ waitress==2.1.2
3
  pandas==2.2.2
4
+ numpy==1.26.4
5
+ joblib==1.4.2
6
+ scikit-learn==1.6.1
7
+ xgboost==2.1.4
8
+