Andrew2505 committed on
Commit
df4eea9
·
verified ·
1 Parent(s): 94ddcf0

Upload folder using huggingface_hub

Browse files
Files changed (4) hide show
  1. Dockerfile +13 -0
  2. app.py +69 -0
  3. model_1.joblib +3 -0
  4. requirements.txt +9 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ FROM python:3.9-slim
2
+
3
+ # Set the working directory inside the container
4
+ WORKDIR /app
5
+
6
+ # Copy all files from the current directory to the container's working directory
7
+ COPY . .
8
+
9
+ # Install dependencies from the requirements file without using cache to reduce image size
10
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
11
+
12
+ # Define the command to start the application using Uvicorn
13
+ CMD ["uvicorn", "app:superkart_api", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import joblib
2
+ import numpy as np
3
+ import pandas as pd
4
+ from flask import Flask, request, jsonify
5
+
6
+ superkart_api = FastAPI("Superkart Sales Prediction")
7
+
8
+ model = joblib.load('model_1.joblib')
9
+
10
+ @superkart_api.get('/')
11
+ def home():
12
+ return "Welcome to SuperKart Sales Prediction API!"
13
+
14
+ @superkart_api.post('/v1/superkart_single')
15
+ def salepred_single():
16
+ sales_data = request.get_json()
17
+
18
+ # Read input data
19
+ sample = {
20
+ 'Product_Weight':sale_data['Product_Weight'],
21
+ 'Product_Sugar_Content':sale_data['Product_Sugar_Content'],
22
+ 'Product_Allocated_Area':sale_data['Product_Allocated_Area'],
23
+ 'Product_Type':sale_data['Product_Type'],
24
+ 'Product_MRP':sale_data['Product_MRP'],
25
+ 'Store_Id':sale_data['Store_Id'],
26
+ 'Store_Size':sale_data['Store_Size'],
27
+ 'Store_Location_City_Type':sale_data['Store_Location_City_Type'],
28
+ 'Store_Type':sale_data['Store_Type'],
29
+
30
+ }
31
+ input_data = pd.DataFrame([sample])
32
+
33
+ # Make predictions
34
+ predicted_sale = model.predict(input_data)[0]
35
+
36
+ # Create response
37
+ response = {'Store_Outlet':sample['Store_Id'],"Sale":round(float(predicted_sale), 2)}
38
+ return jsonify(response)
39
+
40
+ @superkart_api.post('/v1/superkart_batch')
41
+ def salepred_batch():
42
+ file = request.files['file']
43
+ print("File Received:", file.filename)
44
+
45
+ # Read input data
46
+ input_data = pd.read_csv(file)
47
+
48
+ # Make predictions
49
+ predicted_sale = model.predict(input_data).tolist()
50
+
51
+ # Add predictions to input data
52
+ input_data['Predicted_Sale'] = predicted_sale
53
+
54
+ # Group by Store_Id and sum the predicted sales
55
+ grouped_sales = input_data.groupby('Store_Id')['Predicted_Sale'].sum().to_dict()
56
+
57
+ # Create response
58
+ response = {
59
+ 'store_sales': {store_id: round(float(sale), 2) for store_id, sale in grouped_sales.items()}
60
+ }
61
+ print("Final Response:", response)
62
+
63
+ return jsonify(response)
64
+
65
+
66
+
67
+
68
+ if __name__=='__main__':
69
+ superkart_api.run()
model_1.joblib ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:12d5d4995153ea1ec62ffc53c9891a2893e3cd3582c14696d30a074e6822d385
3
+ size 47878618
requirements.txt ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ pandas==2.2.2
2
+ numpy==2.0.2
3
+ scikit-learn==1.6.1
4
+ flask==3.1.2
5
+ joblib==1.4.2
6
+ xgboost==2.1.4
7
+ streamlit==1.49.1
8
+ requests==2.32.3
9
+ uvicorn[standard]