raj thakur commited on
Commit Β·
314f3d5
0
Parent(s):
Initial commit
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- OLD/Analysis.ipynb +0 -0
- OLD/Chatbot.py +52 -0
- OLD/Main.py +244 -0
- OLD/__dbmlsystem.py +596 -0
- OLD/sampleDashboard.py +350 -0
- __pycache__/analytics.cpython-310.pyc +0 -0
- __pycache__/analytics.cpython-313.pyc +0 -0
- __pycache__/config.cpython-313.pyc +0 -0
- __pycache__/data_processor.cpython-310.pyc +0 -0
- __pycache__/data_processor.cpython-313.pyc +0 -0
- __pycache__/database.cpython-310.pyc +0 -0
- __pycache__/database.cpython-313.pyc +0 -0
- __pycache__/database_schema.cpython-313.pyc +0 -0
- __pycache__/excel_exporter.cpython-313.pyc +0 -0
- __pycache__/main.cpython-313.pyc +0 -0
- __pycache__/sales_manager.cpython-313.pyc +0 -0
- __pycache__/whatsapp_manager.cpython-310.pyc +0 -0
- analytics.py +192 -0
- automation.py +88 -0
- components/__init__.py +0 -0
- components/__pycache__/__init__.cpython-310.pyc +0 -0
- components/__pycache__/database_status.cpython-310.pyc +0 -0
- components/database_status.py +26 -0
- data/AMBERAVPURA ENGLISH SABHASAD LIST.xlsx +0 -0
- data/APRIL 24-25.xlsx +0 -0
- data/AUGUST 24-25.xlsx +0 -0
- data/JULY 24-25.xlsx +0 -0
- data/JUNE 24-25.xlsx +0 -0
- data/MAY 24-25.xlsx +0 -0
- data/SEPTEMBER 24-25.xlsx +0 -0
- data/amiyad.xlsx +0 -0
- data/dharkhuniya.xlsx +0 -0
- data/distributors.xlsx +0 -0
- data/kamrol.xlsx +0 -0
- data/sandha.xlsx +0 -0
- data/vishnoli.xlsx +0 -0
- data_processor.py +710 -0
- database.py +893 -0
- main.py +211 -0
- pages/__init__.py +0 -0
- pages/__pycache__/__init__.cpython-310.pyc +0 -0
- pages/__pycache__/customers.cpython-310.pyc +0 -0
- pages/__pycache__/dashboard.cpython-310.pyc +0 -0
- pages/__pycache__/data_import.cpython-310.pyc +0 -0
- pages/__pycache__/demos.cpython-310.pyc +0 -0
- pages/__pycache__/distributors.cpython-310.pyc +0 -0
- pages/__pycache__/file_viewer.cpython-310.pyc +0 -0
- pages/__pycache__/payments.cpython-310.pyc +0 -0
- pages/__pycache__/reports.cpython-310.pyc +0 -0
- pages/__pycache__/sales.cpython-310.pyc +0 -0
OLD/Analysis.ipynb
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
OLD/Chatbot.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import requests
|
| 4 |
+
|
| 5 |
+
# ---- Load sample data ----
@st.cache_data
def load_data():
    """Read the demo sales records from data.csv (cached by Streamlit)."""
    return pd.read_csv("data.csv")

df = load_data()

# ---- Sidebar ----
st.sidebar.title("Controls")
task = st.sidebar.selectbox("Choose task", ["Chat with Bot (via n8n)", "Analyze Data"])

# ---- Main UI ----
st.title("π¬ Data Analysis Assistant (Streamlit + n8n + Ollama)")

if task == "Analyze Data":
    # Static analytics view over the loaded CSV.
    st.subheader("π Sales Data")
    st.dataframe(df)

    st.write("### Total Sales:")
    st.metric("π΅ Amount", f"${df['amount'].sum():,.2f}")

    top_customer = df.groupby("customer")["amount"].sum().idxmax()
    st.write(f"**Top Customer:** {top_customer}")

elif task == "Chat with Bot (via n8n)":
    # Chat view: forwards the question plus the full dataset to an n8n webhook.
    st.subheader("π€ Ask Questions")
    user_input = st.text_area("Your question:", placeholder="e.g. Who spent the most?")

    if st.button("Ask Bot") and user_input:
        payload = {
            "question": user_input,
            "data": df.to_dict(orient="records"),
        }
        try:
            response = requests.post(
                "http://localhost:5678/webhook/chatbot",
                json=payload,
                timeout=60,
            )
            if response.ok:
                # Fall back to the raw body when no "answer" key is present.
                st.success(response.json().get("answer", response.text))
            else:
                st.error(f"n8n Error: {response.status_code}")
        except Exception as exc:
            st.error(f"Connection failed: {exc}")
|
OLD/Main.py
ADDED
|
@@ -0,0 +1,244 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
from datetime import datetime, timedelta
|
| 4 |
+
|
| 5 |
+
def analyze_sales_data(data1, data2):
    """Analyze sales data to identify targets for mantri communication and village focus.

    Parameters
    ----------
    data1 : pandas.DataFrame
        Village-level data with columns Village, Taluka, District, Mantri_Name,
        Mantri_Mobile, Date, Sabhasad, Contact_In_Group, Total_L.
    data2 : pandas.DataFrame
        Sale-level data with columns Village, Date, Total_L.

    Returns
    -------
    (pandas.DataFrame, pandas.DataFrame)
        Per-village action recommendations, and the enriched analysis frame.

    NOTE(review): both input frames are mutated in place (Date parsing and the
    derived metric columns on ``data1``) — pass copies if callers need
    pristine inputs.
    """
    # Normalize date columns so datetime arithmetic works downstream.
    data1['Date'] = pd.to_datetime(data1['Date'])
    data2['Date'] = pd.to_datetime(data2['Date'])

    # Village-level engagement metrics; divide-by-zero artifacts coerced to 0.
    data1['Conversion_Rate'] = (data1['Contact_In_Group'] / data1['Sabhasad'] * 100).round(2)
    data1['Conversion_Rate'] = data1['Conversion_Rate'].replace([np.inf, -np.inf], 0).fillna(0)
    data1['Untapped_Potential'] = data1['Sabhasad'] - data1['Contact_In_Group']
    data1['Sales_Per_Contact'] = (data1['Total_L'] / data1['Contact_In_Group']).round(2)
    data1['Sales_Per_Contact'] = data1['Sales_Per_Contact'].replace([np.inf, -np.inf], 0).fillna(0)

    # Priority: 50% weight on untapped headcount, 50% on low conversion.
    data1['Priority_Score'] = (
        (data1['Untapped_Potential'] / data1['Untapped_Potential'].max() * 50) +
        ((100 - data1['Conversion_Rate']) / 100 * 50)
    ).round(2)

    # Aggregate recent sales per village (no per-customer contact info exists).
    recent_sales = data2.groupby('Village').agg({
        'Total_L': ['sum', 'count'],
        'Date': 'max'
    }).reset_index()
    recent_sales.columns = ['Village', 'Recent_Sales_L', 'Recent_Customers', 'Last_Sale_Date']
    recent_sales['Days_Since_Last_Sale'] = (datetime.now() - recent_sales['Last_Sale_Date']).dt.days

    # Merge; villages with no sale records get 0 sales and a 999-day sentinel.
    analysis_df = data1.merge(recent_sales, on='Village', how='left')
    analysis_df['Recent_Sales_L'] = analysis_df['Recent_Sales_L'].fillna(0)
    analysis_df['Recent_Customers'] = analysis_df['Recent_Customers'].fillna(0)
    analysis_df['Days_Since_Last_Sale'] = analysis_df['Days_Since_Last_Sale'].fillna(999)

    recommendations = [_recommend_for_village(row) for _, row in analysis_df.iterrows()]
    return pd.DataFrame(recommendations), analysis_df


def _recommend_for_village(row):
    """Map one analyzed village row to an action dict (first matching rule wins)."""
    if row['Conversion_Rate'] < 20:
        action, priority = 'Send Marketing Team', 'High'
        reason = (f'Low conversion rate ({row["Conversion_Rate"]:.1f}%) - '
                  f'Only {row["Contact_In_Group"]} of {row["Sabhasad"]} sabhasad contacted')
    elif row['Untapped_Potential'] > 30:
        action, priority = 'Call Mantri for Follow-up', 'High'
        reason = f'High untapped potential ({row["Untapped_Potential"]} sabhasad not contacted)'
    elif row['Days_Since_Last_Sale'] > 30:
        action, priority = 'Check on Mantri', 'Medium'
        reason = f'No recent sales ({row["Days_Since_Last_Sale"]} days since last sale)'
    elif row['Sales_Per_Contact'] > 10:
        action, priority = 'Provide More Stock', 'Medium'
        reason = f'High sales per contact ({row["Sales_Per_Contact"]}L per contact)'
    else:
        action, priority = 'Regular Follow-up', 'Low'
        reason = 'Steady performance - maintain relationship'
    return {
        'Village': row['Village'],
        'Taluka': row['Taluka'],
        'District': row['District'],
        'Mantri': row['Mantri_Name'],
        'Mobile': row['Mantri_Mobile'],
        'Action': action,
        'Reason': reason,
        'Priority': priority,
        'Score': row['Priority_Score'],
    }
|
| 121 |
+
|
| 122 |
+
# WhatsApp message template per recommended action; {mantri}/{village} are
# substituted per row. Replaces five copy-pasted f-string branches so a new
# action needs one table entry, not a new elif.
_MANTRI_TEMPLATES = {
    'Send Marketing Team': """
Namaste {mantri} Ji!

Aapke kshetra {village} mein humare calcium supplement ki conversion rate kam hai.
Humari marketing team aapke yaha demo dene aayegi.
Kripya taiyaari rakhein aur sabhi dudh utpadakon ko soochit karein.

Dhanyavaad,
Calcium Supplement Team
""",
    'Call Mantri for Follow-up': """
Namaste {mantri} Ji!

Aapke kshetra {village} mein bahut se aise farmers hain jo abhi tak humare product se anabhijit hain.
Kripya unse sampark karein aur unhe product ke fayde batayein.
Aapke liye special commission offer hai agle 10 customers ke liye.

Dhanyavaad,
Calcium Supplement Team
""",
    'Check on Mantri': """
Namaste {mantri} Ji!

Humne dekha ki aapke kshetra {village} mein kuch samay se sales nahi hue hain.
Kya koi samasya hai? Kya hum aapki kisi tarah madad kar sakte hain?

Kripya hame batayein.

Dhanyavaad,
Calcium Supplement Team
""",
    'Provide More Stock': """
Namaste {mantri} Ji!

Badhai ho! Aapke kshetra {village} mein humare product ki demand badh rahi hai.
Kya aapko aur stock ki zaroorat hai? Hum jald se jald aapko extra stock bhej denge.

Dhanyavaad,
Calcium Supplement Team
""",
}

# Fallback for 'Regular Follow-up' and any unrecognized action (matches the
# original else branch).
_DEFAULT_MANTRI_TEMPLATE = """
Namaste {mantri} Ji!

Aapke kshetra {village} mein humare product ki sales theek chal rahi hain.
Kripya aise hi continue rakhein aur koi bhi sujhav ho toh hame batayein.

Dhanyavaad,
Calcium Supplement Team
"""


def generate_mantri_messages(recommendations):
    """Generate personalized WhatsApp messages for mantris based on recommendations.

    Parameters
    ----------
    recommendations : pandas.DataFrame
        Must contain columns Mantri, Mobile, Village, Action, Priority
        (as produced by ``analyze_sales_data``).

    Returns
    -------
    pandas.DataFrame
        One row per recommendation with the rendered Message text.
    """
    messages = []

    for _, row in recommendations.iterrows():
        template = _MANTRI_TEMPLATES.get(row['Action'], _DEFAULT_MANTRI_TEMPLATE)
        messages.append({
            'Mantri': row['Mantri'],
            'Mobile': row['Mobile'],
            'Village': row['Village'],
            'Action': row['Action'],
            'Message': template.format(mantri=row['Mantri'], village=row['Village']),
            'Priority': row['Priority'],
        })

    return pd.DataFrame(messages)
|
| 194 |
+
|
| 195 |
+
def identify_demo_locations(analysis_df, top_n=5):
    """Identify the best locations for demos based on various factors.

    Scores each village 0-100: 40% untapped potential, 30% low conversion,
    30% recent sales volume, then returns the ``top_n`` highest scorers.

    Parameters
    ----------
    analysis_df : pandas.DataFrame
        Enriched frame from ``analyze_sales_data`` (needs Untapped_Potential,
        Conversion_Rate, Recent_Sales_L plus the identity columns below).
    top_n : int
        Number of villages to return.

    NOTE(review): adds a ``Demo_Score`` column to ``analysis_df`` in place.
    """
    # Guard the normalizing denominators: an all-zero column would otherwise
    # divide by 0 and fill Demo_Score with NaN/inf, making the ranking useless.
    untapped_max = analysis_df['Untapped_Potential'].max() or 1
    recent_max = analysis_df['Recent_Sales_L'].max() or 1

    analysis_df['Demo_Score'] = (
        (analysis_df['Untapped_Potential'] / untapped_max * 40) +
        ((100 - analysis_df['Conversion_Rate']) / 100 * 30) +
        (analysis_df['Recent_Sales_L'] / recent_max * 30)
    ).round(2)

    demo_locations = analysis_df.nlargest(top_n, 'Demo_Score')[
        ['Village', 'Taluka', 'District', 'Mantri_Name', 'Mantri_Mobile',
         'Conversion_Rate', 'Untapped_Potential', 'Demo_Score']
    ]

    return demo_locations
|
| 213 |
+
|
| 214 |
+
# Example usage with sample data structure
|
| 215 |
+
# Example usage with sample data structure
def main():
    """Run the full pipeline on the sample workbook and print the results."""
    # Sheet1 holds sale-level rows (data2), Sheet2 the village summary (data1).
    workbook = "sampletesting.xlsx"
    data2 = pd.read_excel(workbook, sheet_name="Sheet1")
    data1 = pd.read_excel(workbook, sheet_name="Sheet2")

    # Step 1: per-village recommendations.
    recommendations, analysis = analyze_sales_data(data1, data2)
    print("RECOMMENDED ACTIONS:")
    print(recommendations.sort_values('Score', ascending=False).to_string(index=False))

    # Step 2: WhatsApp messages for each mantri.
    mantri_messages = generate_mantri_messages(recommendations)
    print("\nMANTRI MESSAGES:")
    for _, msg in mantri_messages.iterrows():
        print(f"\nTo: {msg['Mantri']} ({msg['Mobile']}) - {msg['Village']}")
        print(f"Action: {msg['Action']}")
        print(f"Message: {msg['Message']}")

    # Step 3: where to send the demo teams.
    demo_locations = identify_demo_locations(analysis)
    print("\nTOP DEMO LOCATIONS:")
    print(demo_locations.to_string(index=False))

    return recommendations, mantri_messages, demo_locations


if __name__ == "__main__":
    recommendations, mantri_messages, demo_locations = main()
|
OLD/__dbmlsystem.py
ADDED
|
@@ -0,0 +1,596 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import numpy as np
|
| 4 |
+
import plotly.express as px
|
| 5 |
+
import plotly.graph_objects as go
|
| 6 |
+
from datetime import datetime, timedelta
|
| 7 |
+
from sklearn.ensemble import RandomForestClassifier, RandomForestRegressor
|
| 8 |
+
from sklearn.model_selection import train_test_split
|
| 9 |
+
from sklearn.preprocessing import StandardScaler
|
| 10 |
+
from sklearn.cluster import KMeans
|
| 11 |
+
import warnings
|
| 12 |
+
warnings.filterwarnings('ignore')
|
| 13 |
+
|
| 14 |
+
# ---- Page configuration ----
PAGE_CONFIG = {
    "page_title": "Calcium Supplement Sales Automation",
    "page_icon": "π",
    "layout": "wide",
    "initial_sidebar_state": "expanded",
}
st.set_page_config(**PAGE_CONFIG)

# App header
st.title("π Calcium Supplement Sales Automation Dashboard")
st.markdown("---")
|
| 25 |
+
|
| 26 |
+
# Your exact ML functions
|
| 27 |
+
def enhanced_analyze_sales_data(data1, data2):
    """
    Enhanced analysis with ML components for better predictions.

    NOTE(review): mutates both input frames in place (date parsing and the
    derived metric columns) — pass copies if callers need pristine data.
    """
    # Normalize dates on both frames.
    for frame in (data1, data2):
        frame['Date'] = pd.to_datetime(frame['Date'])

    # Engagement metrics; divide-by-zero artifacts are coerced to 0.
    data1['Conversion_Rate'] = (data1['Contact_In_Group'] / data1['Sabhasad'] * 100).round(2)
    data1['Conversion_Rate'] = data1['Conversion_Rate'].replace([np.inf, -np.inf], 0).fillna(0)
    data1['Untapped_Potential'] = data1['Sabhasad'] - data1['Contact_In_Group']
    data1['Sales_Per_Contact'] = (data1['Total_L'] / data1['Contact_In_Group']).round(2)
    data1['Sales_Per_Contact'] = data1['Sales_Per_Contact'].replace([np.inf, -np.inf], 0).fillna(0)

    # Per-village rollup of the sale-level records.
    recent_sales = (
        data2.groupby('Village')
        .agg({'Total_L': ['sum', 'count'], 'Date': 'max'})
        .reset_index()
    )
    recent_sales.columns = ['Village', 'Recent_Sales_L', 'Recent_Customers', 'Last_Sale_Date']
    recent_sales['Days_Since_Last_Sale'] = (datetime.now() - recent_sales['Last_Sale_Date']).dt.days

    # Merge; villages without sales get 0s and a 999-day sentinel.
    analysis_df = data1.merge(recent_sales, on='Village', how='left')
    for column, default in (('Recent_Sales_L', 0),
                            ('Recent_Customers', 0),
                            ('Days_Since_Last_Sale', 999)):
        analysis_df[column] = analysis_df[column].fillna(default)

    # ML pipeline: segment villages, predict sales potential, classify actions.
    analysis_df = apply_village_clustering(analysis_df)
    analysis_df = predict_sales_potential(analysis_df)
    analysis_df = predict_recommended_actions(analysis_df)

    recommendations = generate_ml_recommendations(analysis_df)
    return recommendations, analysis_df
|
| 70 |
+
|
| 71 |
+
def apply_village_clustering(analysis_df):
    """
    Segment villages into four groups using K-Means on standardized metrics.

    NOTE(review): K-Means assigns cluster indices arbitrarily per fit, so the
    fixed index->name mapping below is not guaranteed to describe each
    cluster's actual characteristics — confirm before relying on the labels.
    """
    feature_cols = ['Conversion_Rate', 'Untapped_Potential', 'Sales_Per_Contact',
                    'Recent_Sales_L', 'Days_Since_Last_Sale']
    features = analysis_df[feature_cols].fillna(0)

    # Standardize so no single metric dominates the distance computation.
    standardized = StandardScaler().fit_transform(features)

    model = KMeans(n_clusters=4, random_state=42, n_init=10)
    analysis_df['Cluster'] = model.fit_predict(standardized)

    # Human-readable names for the four segments.
    segment_names = {
        0: 'High Potential - Low Engagement',
        1: 'Steady Performers',
        2: 'Underperforming',
        3: 'New/Developing',
    }
    analysis_df['Segment'] = analysis_df['Cluster'].map(segment_names)

    return analysis_df
|
| 103 |
+
|
| 104 |
+
def predict_sales_potential(analysis_df):
    """
    Predict sales potential for each village using a Random Forest regressor.

    Adds two columns in place: ``Predicted_Sales`` (model output, or the
    current ``Total_L`` when there is too little data to train) and
    ``Sales_Gap`` (predicted minus actual).
    """
    feature_cols = ['Sabhasad', 'Contact_In_Group', 'Conversion_Rate',
                    'Untapped_Potential', 'Recent_Sales_L', 'Days_Since_Last_Sale']
    prediction_features = analysis_df[feature_cols].fillna(0)

    # Target variable: current sales volume.
    target = analysis_df['Total_L'].fillna(0)

    # Only train if we have enough data for a meaningful fit.
    if len(prediction_features) > 10:
        # The held-out 20% is never evaluated here, so discard it explicitly.
        # (The original also built a feature_importance frame per call and
        # threw it away — dead code, removed.)
        X_train, _, y_train, _ = train_test_split(
            prediction_features, target, test_size=0.2, random_state=42
        )

        model = RandomForestRegressor(n_estimators=100, random_state=42)
        model.fit(X_train, y_train)

        # NOTE(review): predictions cover every row, including training rows,
        # so Sales_Gap on those rows reflects in-sample fit, not held-out skill.
        analysis_df['Predicted_Sales'] = model.predict(prediction_features)
        analysis_df['Sales_Gap'] = analysis_df['Predicted_Sales'] - analysis_df['Total_L']
    else:
        # Fallback if not enough data: assume current sales equal potential.
        analysis_df['Predicted_Sales'] = analysis_df['Total_L']
        analysis_df['Sales_Gap'] = 0

    return analysis_df
|
| 146 |
+
|
| 147 |
+
def predict_recommended_actions(analysis_df):
    """
    Use ML to predict the best action for each village.

    Rule-derived labels (first matching rule wins) serve as training targets
    for a Random Forest classifier; with too little data the rules are used
    directly at full confidence.
    """
    rule_conditions = [
        analysis_df['Conversion_Rate'] < 20,
        analysis_df['Untapped_Potential'] > 30,
        analysis_df['Days_Since_Last_Sale'] > 30,
        analysis_df['Sales_Per_Contact'] > 10,
    ]
    rule_actions = [
        'Send Marketing Team',
        'Call Mantri for Follow-up',
        'Check on Mantri',
        'Provide More Stock',
    ]
    # np.select applies the first true condition, matching the original
    # nested np.where cascade.
    analysis_df['Action_Label'] = np.select(rule_conditions, rule_actions,
                                            default='Regular Follow-up')

    feature_cols = ['Conversion_Rate', 'Untapped_Potential', 'Sales_Per_Contact',
                    'Recent_Sales_L', 'Days_Since_Last_Sale', 'Sales_Gap']
    classification_features = analysis_df[feature_cols].fillna(0)
    target = analysis_df['Action_Label']

    # Train only with enough rows and at least two distinct labels.
    if len(classification_features) > 10 and len(target.unique()) > 1:
        X_train, X_test, y_train, y_test = train_test_split(
            classification_features, target, test_size=0.2, random_state=42, stratify=target
        )

        clf = RandomForestClassifier(n_estimators=100, random_state=42)
        clf.fit(X_train, y_train)

        # Predict for every row; confidence is the top class probability.
        analysis_df['ML_Recommended_Action'] = clf.predict(classification_features)
        analysis_df['Action_Confidence'] = np.max(
            clf.predict_proba(classification_features), axis=1
        )
    else:
        # Fallback to the rule-based labels with full confidence.
        analysis_df['ML_Recommended_Action'] = analysis_df['Action_Label']
        analysis_df['Action_Confidence'] = 1.0

    return analysis_df
|
| 199 |
+
|
| 200 |
+
# (reason stem, priority) per ML-recommended action. Replaces the original
# five-branch if/elif ladder so a new action needs one table entry.
_ACTION_DETAILS = {
    'Send Marketing Team': ('ML predicts marketing team needed', 'High'),
    'Call Mantri for Follow-up': ('ML predicts mantri follow-up needed', 'High'),
    'Check on Mantri': ('ML suggests checking on mantri', 'Medium'),
    'Provide More Stock': ('ML predicts stock increase needed', 'Medium'),
}
# Fallback for 'Regular Follow-up' and any unrecognized action.
_DEFAULT_ACTION_DETAILS = ('ML recommends regular follow-up', 'Low')


def generate_ml_recommendations(analysis_df):
    """
    Generate recommendations based on ML predictions.

    Parameters
    ----------
    analysis_df : pandas.DataFrame
        Must contain Village, Taluka, District, Mantri_Name, Mantri_Mobile,
        Segment, ML_Recommended_Action, Action_Confidence; Sales_Gap is
        optional (defaults to 0 per row).

    Returns
    -------
    pandas.DataFrame
        One recommendation row per village.
    """
    recommendations = []

    for _, row in analysis_df.iterrows():
        action = row['ML_Recommended_Action']
        confidence = row['Action_Confidence']
        segment = row['Segment']

        stem, priority = _ACTION_DETAILS.get(action, _DEFAULT_ACTION_DETAILS)
        reason = f"{stem} (Confidence: {confidence:.2f}). Segment: {segment}"

        recommendations.append({
            'Village': row['Village'],
            'Taluka': row['Taluka'],
            'District': row['District'],
            'Mantri': row['Mantri_Name'],
            'Mobile': row['Mantri_Mobile'],
            'Action': action,
            'Reason': reason,
            'Priority': priority,
            'Confidence': confidence,
            'Segment': segment,
            # Sales_Gap may be absent when the regressor fallback ran upstream.
            'Sales_Gap': row.get('Sales_Gap', 0),
        })

    return pd.DataFrame(recommendations)
|
| 248 |
+
|
| 249 |
+
def generate_ml_mantri_messages(recommendations):
    """
    Generate personalized messages based on ML recommendations

    Builds one WhatsApp-style message per recommendation row; the template
    is selected by the row's 'Action' value, and the final else branch is
    the catch-all "regular follow-up" template for any other action.

    Parameters
    ----------
    recommendations : DataFrame with at least Mantri, Mobile, Village,
        Action, Priority, Confidence and Segment columns (Sales_Gap is also
        read for the 'Provide More Stock' template).

    Returns
    -------
    DataFrame with one row per recommendation: Mantri, Mobile, Village,
    Action, Message, Priority and Confidence columns.
    """
    messages = []

    for _, row in recommendations.iterrows():
        # NOTE: the template bodies below are runtime strings sent to the
        # mantris — keep their wording/line breaks unchanged.
        if row['Action'] == 'Send Marketing Team':
            message = f"""
Namaste {row['Mantri']} Ji!

Our AI system has identified that your village {row['Village']} has high potential for growth.
We're sending our marketing team to conduct demo sessions and help you reach more customers.

Based on our analysis:
- Segment: {row['Segment']}
- Confidence: {row['Confidence']*100:.1f}%

Please prepare for their visit and notify potential customers.

Dhanyavaad,
Calcium Supplement Team
"""
        elif row['Action'] == 'Call Mantri for Follow-up':
            message = f"""
Namaste {row['Mantri']} Ji!

Our AI analysis shows significant untapped potential in {row['Village']}.
We recommend focusing on follow-up with these customers:

- Segment: {row['Segment']}
- Confidence: {row['Confidence']*100:.1f}%

A special commission offer is available for your next 10 customers.

Dhanyavaad,
Calcium Supplement Team
"""
        elif row['Action'] == 'Check on Mantri':
            message = f"""
Namaste {row['Mantri']} Ji!

Our system shows reduced activity in {row['Village']}.
Is everything alright? Do you need any support from our team?

- Segment: {row['Segment']}
- Confidence: {row['Confidence']*100:.1f}%

Please let us know how we can help.

Dhanyavaad,
Calcium Supplement Team
"""
        elif row['Action'] == 'Provide More Stock':
            # Only this template reads Sales_Gap, so rows with other actions
            # do not require that column to be numeric.
            message = f"""
Namaste {row['Mantri']} Ji!

Great news! Our AI predicts increased demand in {row['Village']}.
Would you like us to send additional stock?

- Segment: {row['Segment']}
- Confidence: {row['Confidence']*100:.1f}%
- Predicted Sales Gap: {row['Sales_Gap']:.1f}L

Please confirm your additional requirements.

Dhanyavaad,
Calcium Supplement Team
"""
        else:
            message = f"""
Namaste {row['Mantri']} Ji!

Our system shows steady performance in {row['Village']}.
Keep up the good work!

- Segment: {row['Segment']}
- Confidence: {row['Confidence']*100:.1f}%

As always, let us know if you need any support.

Dhanyavaad,
Calcium Supplement Team
"""

        messages.append({
            'Mantri': row['Mantri'],
            'Mobile': row['Mobile'],
            'Village': row['Village'],
            'Action': row['Action'],
            'Message': message,
            'Priority': row['Priority'],
            'Confidence': row['Confidence']
        })

    return pd.DataFrame(messages)
|
| 345 |
+
|
| 346 |
+
# Visualization functions
|
| 347 |
+
def plot_village_performance(analysis_df):
    """Scatter of conversion rate vs. untapped potential, one bubble per village."""
    axis_labels = {
        'Conversion_Rate': 'Conversion Rate (%)',
        'Untapped_Potential': 'Untapped Potential',
    }
    chart = px.scatter(
        analysis_df,
        x='Conversion_Rate',
        y='Untapped_Potential',
        size='Total_L',
        color='Segment',
        hover_name='Village',
        title='Village Performance Analysis',
        labels=axis_labels,
    )
    chart.update_layout(height=500)
    return chart
|
| 361 |
+
|
| 362 |
+
def plot_sales_trends(analysis_df):
    """Bar chart of total sales (liters) per village, colored by segment."""
    chart = px.bar(
        analysis_df,
        x='Village',
        y='Total_L',
        color='Segment',
        title='Total Sales by Village',
        labels={'Total_L': 'Total Sales (L)', 'Village': 'Village'},
    )
    # Slant the village names so long labels stay readable.
    chart.update_layout(height=400, xaxis_tickangle=-45)
    return chart
|
| 373 |
+
|
| 374 |
+
def plot_priority_matrix(recommendations):
    """Treemap of recommended actions grouped by priority level.

    Fix: works on a copy of the input. The original assigned the helper
    'Priority_Value' column directly onto the caller's DataFrame, so the
    column leaked into session state, the on-screen recommendations table
    and the CSV download.

    Parameters
    ----------
    recommendations : DataFrame with 'Priority' (High/Medium/Low) and
        'Village' columns.

    Returns the configured plotly figure.
    """
    plot_df = recommendations.copy()
    priority_order = {'High': 3, 'Medium': 2, 'Low': 1}
    plot_df['Priority_Value'] = plot_df['Priority'].map(priority_order)

    fig = px.treemap(plot_df,
                     path=['Priority', 'Village'],
                     values='Priority_Value',
                     color='Priority_Value',
                     color_continuous_scale='RdYlGn_r',
                     title='Action Priority Matrix')

    fig.update_layout(height=500)
    return fig
|
| 388 |
+
|
| 389 |
+
def display_key_metrics(analysis_df):
    """Render the four headline KPI tiles for the current analysis frame."""
    kpi_cols = st.columns(4)

    with kpi_cols[0]:
        st.metric("Total Villages", len(analysis_df))
    with kpi_cols[1]:
        mean_conversion = analysis_df['Conversion_Rate'].mean()
        st.metric("Avg Conversion Rate", f"{mean_conversion:.1f}%")
    with kpi_cols[2]:
        untapped_sum = analysis_df['Untapped_Potential'].sum()
        st.metric("Total Untapped Potential", f"{untapped_sum}")
    with kpi_cols[3]:
        sales_sum = analysis_df['Total_L'].sum()
        st.metric("Total Sales (L)", f"{sales_sum}")
|
| 404 |
+
|
| 405 |
+
# Initialize session state: every slot the app writes to later starts as None
# so the "no data yet" checks below are well-defined on first run.
for _state_key in ('data1', 'data2', 'analysis_df', 'recommendations', 'ml_messages'):
    if _state_key not in st.session_state:
        st.session_state[_state_key] = None
|
| 416 |
+
|
| 417 |
+
# Sidebar
with st.sidebar:
    st.header("Data Input")

    # File uploaders.
    # FIX: the widget keys must NOT be "data1"/"data2" — those session_state
    # slots are assigned below, and Streamlit raises a StreamlitAPIException
    # when code writes to a session_state key owned by an instantiated
    # widget. Distinct keys avoid that crash.
    st.subheader("Upload Village Data (Data1)")
    uploaded_data1 = st.file_uploader("CSV or Excel file", type=["csv", "xlsx"], key="data1_file")

    st.subheader("Upload Sales Data (Data2)")
    uploaded_data2 = st.file_uploader("CSV or Excel file", type=["csv", "xlsx"], key="data2_file")

    if st.button("Load Data and Run ML Analysis"):
        if uploaded_data1 and uploaded_data2:
            try:
                def _read_table(uploaded):
                    # Pick the pandas reader by file extension (csv vs xlsx).
                    if uploaded.name.endswith('.csv'):
                        return pd.read_csv(uploaded)
                    return pd.read_excel(uploaded)

                data1 = _read_table(uploaded_data1)
                data2 = _read_table(uploaded_data2)

                # Store in session state so the main page survives reruns.
                st.session_state.data1 = data1
                st.session_state.data2 = data2

                # Run ML analysis
                with st.spinner("Running ML analysis..."):
                    recommendations, analysis_df = enhanced_analyze_sales_data(data1, data2)
                    st.session_state.analysis_df = analysis_df
                    st.session_state.recommendations = recommendations

                    ml_messages = generate_ml_mantri_messages(recommendations)
                    st.session_state.ml_messages = ml_messages

                    st.success("ML analysis completed successfully!")

            except Exception as e:
                # Surface the failure in the UI rather than crashing the app.
                st.error(f"Error processing data: {str(e)}")
        else:
            st.error("Please upload both files to proceed")
|
| 461 |
+
|
| 462 |
+
# Main content: rendered only after the sidebar has populated session state.
if st.session_state.analysis_df is not None and st.session_state.recommendations is not None:
    # Display dashboard
    tab1, tab2, tab3, tab4 = st.tabs(["Dashboard", "Village Analysis", "Actions & Messages", "Team Dispatch"])

    def _fmt(value, spec):
        # Defensive numeric formatting. Series.get() returns the default for
        # a missing column, and the original code applied a numeric format
        # spec (':.2f') to the 'N/A' string default, crashing the page with
        # a ValueError. Fall back to 'N/A' for non-numeric values.
        try:
            return format(value, spec)
        except (TypeError, ValueError):
            return 'N/A'

    with tab1:
        st.header("ML-Powered Performance Dashboard")
        display_key_metrics(st.session_state.analysis_df)

        col1, col2 = st.columns(2)

        with col1:
            st.plotly_chart(plot_village_performance(st.session_state.analysis_df), use_container_width=True)

        with col2:
            st.plotly_chart(plot_priority_matrix(st.session_state.recommendations), use_container_width=True)

        st.plotly_chart(plot_sales_trends(st.session_state.analysis_df), use_container_width=True)

    with tab2:
        st.header("Village Analysis with ML Segmentation")

        selected_village = st.selectbox("Select Village", st.session_state.analysis_df['Village'].unique())
        village_data = st.session_state.analysis_df[st.session_state.analysis_df['Village'] == selected_village].iloc[0]

        col1, col2 = st.columns(2)

        with col1:
            st.subheader("Village Details")
            st.write(f"**Village:** {village_data['Village']}")
            st.write(f"**Taluka:** {village_data['Taluka']}")
            st.write(f"**District:** {village_data['District']}")
            st.write(f"**Mantri:** {village_data['Mantri_Name']}")
            st.write(f"**Mantri Mobile:** {village_data['Mantri_Mobile']}")
            st.write(f"**Segment:** {village_data.get('Segment', 'N/A')}")
            st.write(f"**ML Recommended Action:** {village_data.get('ML_Recommended_Action', 'N/A')}")
            st.write(f"**Action Confidence:** {_fmt(village_data.get('Action_Confidence'), '.2f')}")

        with col2:
            st.subheader("Performance Metrics")
            st.write(f"**Sabhasad:** {village_data['Sabhasad']}")
            st.write(f"**Contacted:** {village_data['Contact_In_Group']}")
            st.write(f"**Conversion Rate:** {village_data['Conversion_Rate']}%")
            st.write(f"**Untapped Potential:** {village_data['Untapped_Potential']}")
            st.write(f"**Total Sales:** {village_data['Total_L']}L")
            st.write(f"**Sales per Contact:** {village_data['Sales_Per_Contact']}L")
            st.write(f"**Predicted Sales:** {_fmt(village_data.get('Predicted_Sales'), '.1f')}L")
            st.write(f"**Sales Gap:** {_fmt(village_data.get('Sales_Gap'), '.1f')}L")

    with tab3:
        st.header("ML-Based Actions & Messages")

        st.subheader("ML-Generated Recommendations")
        st.dataframe(st.session_state.recommendations)

        # Download recommendations
        csv_data = st.session_state.recommendations.to_csv(index=False)
        st.download_button(
            label="Download Recommendations as CSV",
            data=csv_data,
            file_name="ml_sales_recommendations.csv",
            mime="text/csv"
        )

        st.subheader("Generate ML-Powered Messages")
        selected_mantri = st.selectbox("Select Mantri", st.session_state.recommendations['Mantri'].unique())
        mantri_data = st.session_state.recommendations[
            st.session_state.recommendations['Mantri'] == selected_mantri].iloc[0]

        message_df = st.session_state.ml_messages[
            st.session_state.ml_messages['Mantri'] == selected_mantri]

        if not message_df.empty:
            message = message_df.iloc[0]['Message']
            st.text_area("ML-Generated Message", message, height=300)

            if st.button("Send Message"):
                st.success(f"Message sent to {mantri_data['Mantri']} at {mantri_data['Mobile']}")

        st.subheader("Bulk Message Sender")
        if st.button("Generate All ML Messages"):
            st.session_state.all_messages = st.session_state.ml_messages

        if 'all_messages' in st.session_state:
            st.dataframe(st.session_state.all_messages[['Mantri', 'Village', 'Action', 'Priority', 'Confidence']])

            if st.button("Send All ML Messages"):
                progress_bar = st.progress(0)
                total = len(st.session_state.all_messages)
                # FIX: use a 0-based position from enumerate. The original
                # used the DataFrame index from iterrows(), which is not
                # guaranteed to be 0..n-1 and could push st.progress past
                # 1.0 (an error) on a non-default index.
                for pos, (_, row) in enumerate(st.session_state.all_messages.iterrows()):
                    # Simulate sending message
                    progress_bar.progress((pos + 1) / total)
                st.success("All ML-powered messages sent successfully!")

    with tab4:
        st.header("Marketing Team Dispatch with ML Insights")

        st.subheader("Villages Needing Team Visit (ML Identified)")
        high_priority = st.session_state.recommendations[
            st.session_state.recommendations['Action'] == 'Send Marketing Team']

        if not high_priority.empty:
            # NOTE(review): widget keys are derived from the village name —
            # duplicate village rows would collide; confirm villages are
            # unique in the recommendations frame.
            for _, row in high_priority.iterrows():
                with st.expander(f"{row['Village']} - {row['Mantri']} (Confidence: {row['Confidence']:.2f})"):
                    st.write(f"**Reason:** {row['Reason']}")
                    st.write(f"**Segment:** {row['Segment']}")
                    st.write(f"**Sales Gap:** {row['Sales_Gap']:.1f}L")

                    dispatch_date = st.date_input("Dispatch Date", key=f"date_{row['Village']}")
                    team_size = st.slider("Team Size", 1, 5, 2, key=f"size_{row['Village']}")

                    if st.button("Schedule Dispatch", key=f"dispatch_{row['Village']}"):
                        st.success(f"Team dispatch scheduled for {row['Village']} on {dispatch_date}")
        else:
            st.info("No villages currently require immediate team dispatch based on ML analysis.")

        st.subheader("ML Performance Insights")
        st.write("Based on our machine learning analysis, here are key insights:")

        # Show segment distribution
        segment_counts = st.session_state.analysis_df['Segment'].value_counts()
        fig = px.pie(values=segment_counts.values, names=segment_counts.index,
                     title="Village Segment Distribution")
        st.plotly_chart(fig, use_container_width=True)

        # Show confidence distribution
        fig = px.histogram(st.session_state.recommendations, x='Confidence',
                           title='Confidence Distribution of ML Recommendations')
        st.plotly_chart(fig, use_container_width=True)

else:
    st.info("Please upload your data files using the sidebar and click 'Load Data and Run ML Analysis' to get started.")
|
| 593 |
+
|
| 594 |
+
# Footer
st.markdown("---")  # horizontal rule separating the footer from the page body
st.markdown("**ML-Powered Calcium Supplement Sales Automation System** | For internal use only")
|
OLD/sampleDashboard.py
ADDED
|
@@ -0,0 +1,350 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import numpy as np
|
| 4 |
+
import plotly.express as px
|
| 5 |
+
import plotly.graph_objects as go
|
| 6 |
+
from datetime import datetime, timedelta
|
| 7 |
+
import time
|
| 8 |
+
|
| 9 |
+
# Set page configuration (must be the first Streamlit call in the script)
st.set_page_config(
    page_title="Calcium Supplement Sales Dashboard",
    page_icon="π",  # NOTE(review): likely a mojibake'd emoji from the original encoding — confirm intended icon
    layout="wide",
    initial_sidebar_state="expanded"
)
|
| 16 |
+
|
| 17 |
+
# Sample data (replace with your actual data loading)
@st.cache_data
def load_data():
    """Build and return the demo ``(sales_data, mantri_data)`` DataFrames.

    Cached by Streamlit (@st.cache_data) so the frames are constructed once
    per session. The literals below are placeholder sample rows — replace
    this function with real data loading for production use.
    """
    # Sales data with customer information
    sales_data = pd.DataFrame({
        'Date': ['2025-06-01', '2025-06-01', '2025-06-10', '2025-06-11', '2025-06-12',
                 '2025-07-30', '2025-07-30', '2025-07-31', '2025-07-31', '2025-07-31'],
        'Customer': ['Gopalbhai', 'Ramprasad Khatik', 'Vikramsinh', 'Prahladbhai -Mantry', 'V S Stud Farm',
                     'Hemendrabhai Parmar', 'Sundarbhai', 'Kamleshbhai Vasava -Mantry', 'Kiranbhai -Mantry', 'Kiritbhai'],
        'Village': ['Shilly', 'Rajasthan', 'Mithapura', 'Bhalod Dairy', 'Waghodia',
                    'Panchdevla', 'Siyali', 'Moran', 'Talodara', 'Sindhrot'],
        'Total_L': [35.0, 400.0, 30.0, 7.0, 400.0, 50.0, 13.0, 1.0, 1.0, 30.0]
    })

    # Mantri data with village information
    mantri_data = pd.DataFrame({
        'DATE': ['2024-03-08', '2025-06-03', '2025-02-23', '2025-05-28', '2025-05-02',
                 '2024-09-21', '2024-10-26', '2024-03-19', '2025-01-30', '2025-07-18'],
        'VILLAGE': ['JILOD', 'MANJIPURA', 'GOTHADA', 'UNTKHARI', 'VEMAR',
                    'KANODA', 'KOTAMBI', 'RASNOL', 'JITPURA', 'BHATPURA'],
        'MANTRY_NAME': ['AJAYBHAI PATEL', 'AJAYBHAI PATEL', 'AJGAR KHAN', 'AMBALAL CHAUHAN', 'AMBALAL GOHIL',
                        'VINUBHAI SOLANKI', 'VISHNUBHAI', 'VITHTHALBHAI', 'YOGESHBHAI', 'YUVRAJSINH'],
        'MOBILE_NO': [7984136988, 9737910554, 9724831903, 9313860902, 9978081739,
                      9998756469, 9909550170, 9924590017, 7990383811, 6353209447],
        'sabhasad': [38, 21, 3, 0, 2, 0, 14, 1183, 8, 6],
        'contact_in_group': [38.0, 16.0, 2.0, 0.0, 0.0, 0.0, 14.0, 268.0, 5.0, 4.0],
        'TOTAL_L': [99.0, 120.0, 19.0, 87.0, 32.0, 60.0, 54.0, 82.0, 25.0, 11.0]
    })

    # Convert dates to datetime (errors='coerce' turns unparseable DATE
    # strings into NaT instead of raising)
    sales_data['Date'] = pd.to_datetime(sales_data['Date'])
    mantri_data['DATE'] = pd.to_datetime(mantri_data['DATE'], errors='coerce')

    return sales_data, mantri_data
|
| 51 |
+
|
| 52 |
+
# Analysis functions
|
| 53 |
+
def analyze_mantri_performance(mantri_data, sales_data):
    """Score each mantri's outreach performance and attach recent sales.

    Parameters
    ----------
    mantri_data : DataFrame with VILLAGE, MANTRY_NAME, MOBILE_NO, sabhasad,
        contact_in_group and TOTAL_L columns.
    sales_data : DataFrame with Village, Customer and Total_L columns.

    Returns a copy of ``mantri_data`` extended with Conversion_Rate,
    Untapped_Potential, Sales_Efficiency, Priority_Score, Recent_Sales and
    Recent_Customers columns. The input frames are not mutated.
    """
    mantri_data = mantri_data.copy()

    # Calculate performance metrics. Divisions by a zero sabhasad/contact
    # count produce inf/NaN, which are normalised to 0 immediately after.
    mantri_data['Conversion_Rate'] = (mantri_data['contact_in_group'] / mantri_data['sabhasad'] * 100).round(2)
    mantri_data['Conversion_Rate'] = mantri_data['Conversion_Rate'].replace([np.inf, -np.inf], 0).fillna(0)
    mantri_data['Untapped_Potential'] = mantri_data['sabhasad'] - mantri_data['contact_in_group']
    mantri_data['Sales_Efficiency'] = (mantri_data['TOTAL_L'] / mantri_data['contact_in_group']).round(2)
    mantri_data['Sales_Efficiency'] = mantri_data['Sales_Efficiency'].replace([np.inf, -np.inf], 0).fillna(0)

    # Priority score: half the weight on relative untapped headcount, half
    # on the conversion shortfall. FIX: guard the max() == 0 case, which
    # previously divided by zero and made every Priority_Score NaN.
    max_untapped = mantri_data['Untapped_Potential'].max()
    if max_untapped:
        untapped_component = mantri_data['Untapped_Potential'] / max_untapped * 50
    else:
        untapped_component = 0.0
    mantri_data['Priority_Score'] = (
        untapped_component +
        ((100 - mantri_data['Conversion_Rate']) / 100 * 50)
    ).round(2)

    # Add recent sales data (total liters and distinct sale count per village)
    recent_sales = sales_data.groupby('Village').agg({
        'Total_L': 'sum',
        'Customer': 'count'
    }).reset_index()
    recent_sales.columns = ['VILLAGE', 'Recent_Sales', 'Recent_Customers']

    mantri_data = mantri_data.merge(recent_sales, on='VILLAGE', how='left')
    mantri_data['Recent_Sales'] = mantri_data['Recent_Sales'].fillna(0)
    mantri_data['Recent_Customers'] = mantri_data['Recent_Customers'].fillna(0)

    return mantri_data
|
| 81 |
+
|
| 82 |
+
def analyze_village_performance(sales_data, mantri_data):
    """Aggregate sales per village and join mantri reach metrics.

    Parameters
    ----------
    sales_data : DataFrame with Village, Customer, Total_L and Date columns.
    mantri_data : DataFrame with VILLAGE, MANTRY_NAME, MOBILE_NO, sabhasad
        and contact_in_group columns.

    Returns one row per village with total sales, customer count, recency
    (days since the latest sale, relative to now) and conversion metrics.
    Neither input frame is mutated.
    """
    # Group sales by village
    village_sales = sales_data.groupby('Village').agg({
        'Total_L': 'sum',
        'Customer': 'count',
        'Date': 'max'
    }).reset_index()
    village_sales.columns = ['Village', 'Total_Sales', 'Customer_Count', 'Last_Sale_Date']

    # Calculate days since last sale (naive local time, matching the data)
    village_sales['Days_Since_Last_Sale'] = (datetime.now() - village_sales['Last_Sale_Date']).dt.days

    # Merge with mantri data. FIX: copy the slice before renaming columns —
    # assigning .columns on a column-selected slice mutates a potential view
    # of the caller's frame and triggers pandas' SettingWithCopy behaviour.
    mantri_summary = mantri_data[['VILLAGE', 'MANTRY_NAME', 'MOBILE_NO', 'sabhasad', 'contact_in_group']].copy()
    mantri_summary.columns = ['Village', 'Mantri_Name', 'Mantri_Mobile', 'Sabhasad', 'Contacts']

    village_performance = village_sales.merge(mantri_summary, on='Village', how='left')

    # Calculate performance metrics. Villages without a mantri record yield
    # NaN after the left merge; normalise to 0 so downstream numeric
    # comparisons (e.g. Conversion_Rate < 20) behave deterministically.
    village_performance['Conversion_Rate'] = (village_performance['Contacts'] / village_performance['Sabhasad'] * 100).round(2)
    village_performance['Conversion_Rate'] = village_performance['Conversion_Rate'].replace([np.inf, -np.inf], 0).fillna(0)
    village_performance['Untapped_Potential'] = (village_performance['Sabhasad'] - village_performance['Contacts']).fillna(0)

    return village_performance
|
| 106 |
+
|
| 107 |
+
# Message templates
|
| 108 |
+
def get_mantri_message_template(mantri_name, village, reason, performance_data):
    """Return a Hinglish WhatsApp message for a mantri.

    ``reason`` selects the template ('Low Conversion', 'High Potential' or
    'Good Performance'); any other value yields the generic placeholder.

    FIX: templates are now built lazily per reason. The original put all
    three f-strings in a dict literal, so every call evaluated every
    template eagerly — a NaN/missing 'Untapped_Potential' crashed via int()
    even when the chosen template never used that metric.

    Parameters
    ----------
    mantri_name, village : str, interpolated into the message.
    reason : str template selector.
    performance_data : mapping/Series providing 'Conversion_Rate' and (for
        the first two templates) 'Untapped_Potential'.
    """
    if reason == 'Low Conversion':
        return f"""
Namaste {mantri_name} Ji!

Aapke kshetra {village} mein humare calcium supplement ki conversion rate kam hai ({performance_data['Conversion_Rate']}%).
Humari marketing team aapke yaha demo dene aayegi.
Kripya taiyaari rakhein aur sabhi dudh utpadakon ko soochit karein.

Aapke paas abhi bhi {int(performance_data['Untapped_Potential'])} aise farmers hain jo product nahi use kar rahe hain.

Dhanyavaad,
Calcium Supplement Team
"""
    if reason == 'High Potential':
        return f"""
Namaste {mantri_name} Ji!

Aapke kshetra {village} mein {int(performance_data['Untapped_Potential'])} aise farmers hain jo abhi tak humare product se anabhijit hain.
Kripya unse sampark karein aur unhe product ke fayde batayein.
Aapke liye special commission offer hai agle 10 naye customers ke liye.

Dhanyavaad,
Calcium Supplement Team
"""
    if reason == 'Good Performance':
        return f"""
Namaste {mantri_name} Ji!

Aapke kshetra {village} mein humare product ki demand badh rahi hai.
Aapki conversion rate {performance_data['Conversion_Rate']}% hai jo bahut achchi hai.

Kripya farmers ko yaad dilaein ki pregnancy ke 3-9 mahine aur delivery ke baad calcium supplement zaroori hai.

Dhanyavaad,
Calcium Supplement Team
"""
    return "Custom message based on analysis"
|
| 146 |
+
|
| 147 |
+
# Load data (cached) and derive the two analysis frames used by every page
sales_data, mantri_data = load_data()
mantri_performance = analyze_mantri_performance(mantri_data, sales_data)
village_performance = analyze_village_performance(sales_data, mantri_data)

# Streamlit app header
st.title("π Calcium Supplement Sales Automation Dashboard")
st.markdown("---")

# Sidebar navigation: 'section' drives the if/elif page switch below
st.sidebar.header("Navigation")
section = st.sidebar.radio("Go to", ["Dashboard", "Mantri Performance", "Village Analysis", "Message Center", "Team Dispatch"])
|
| 159 |
+
|
| 160 |
+
# Dashboard page: headline KPIs plus two overview charts.
if section == "Dashboard":
    st.header("Sales Performance Overview")

    kpi_cols = st.columns(4)
    with kpi_cols[0]:
        st.metric("Total Villages Covered", len(mantri_performance))
    with kpi_cols[1]:
        st.metric("Total Mantris", len(mantri_performance['MANTRY_NAME'].unique()))
    with kpi_cols[2]:
        st.metric("Total Sales (Liters)", mantri_performance['TOTAL_L'].sum())
    with kpi_cols[3]:
        mean_conversion = mantri_performance['Conversion_Rate'].mean()
        st.metric("Avg Conversion Rate", f"{mean_conversion:.2f}%")

    st.subheader("Top Priority Mantris")
    top_cols = ['MANTRY_NAME', 'VILLAGE', 'Conversion_Rate', 'Untapped_Potential', 'Priority_Score']
    st.dataframe(mantri_performance.nlargest(5, 'Priority_Score')[top_cols])

    st.subheader("Sales Distribution by Village")
    sales_fig = px.bar(mantri_performance, x='VILLAGE', y='TOTAL_L', title='Total Sales by Village')
    st.plotly_chart(sales_fig, use_container_width=True)

    st.subheader("Conversion Rate vs Untapped Potential")
    perf_fig = px.scatter(mantri_performance, x='Conversion_Rate', y='Untapped_Potential',
                          size='TOTAL_L', color='VILLAGE', hover_name='MANTRY_NAME',
                          title='Mantri Performance Analysis')
    st.plotly_chart(perf_fig, use_container_width=True)
|
| 189 |
+
|
| 190 |
+
# Mantri Performance
|
| 191 |
+
elif section == "Mantri Performance":
|
| 192 |
+
st.header("Mantri Performance Analysis")
|
| 193 |
+
|
| 194 |
+
selected_mantri = st.selectbox("Select Mantri", mantri_performance['MANTRY_NAME'].unique())
|
| 195 |
+
mantri_data = mantri_performance[mantri_performance['MANTRY_NAME'] == selected_mantri].iloc[0]
|
| 196 |
+
|
| 197 |
+
col1, col2, col3, col4 = st.columns(4)
|
| 198 |
+
|
| 199 |
+
with col1:
|
| 200 |
+
st.metric("Mantri", mantri_data['MANTRY_NAME'])
|
| 201 |
+
with col2:
|
| 202 |
+
st.metric("Village", mantri_data['VILLAGE'])
|
| 203 |
+
with col3:
|
| 204 |
+
st.metric("Conversion Rate", f"{mantri_data['Conversion_Rate']}%")
|
| 205 |
+
with col4:
|
| 206 |
+
st.metric("Untapped Potential", int(mantri_data['Untapped_Potential']))
|
| 207 |
+
|
| 208 |
+
st.subheader("Mantri Details")
|
| 209 |
+
st.dataframe(mantri_data)
|
| 210 |
+
|
| 211 |
+
st.subheader("Action Recommendations")
|
| 212 |
+
if mantri_data['Conversion_Rate'] < 20:
|
| 213 |
+
st.error(f"**Send Marketing Team**: Conversion rate is low ({mantri_data['Conversion_Rate']}%). Need demos and awareness campaigns.")
|
| 214 |
+
if mantri_data['Untapped_Potential'] > 10:
|
| 215 |
+
st.warning(f"**Call Mantri**: {int(mantri_data['Untapped_Potential'])} farmers still not converted. Push Mantri to contact them.")
|
| 216 |
+
if mantri_data['Conversion_Rate'] > 50:
|
| 217 |
+
st.success(f"**Expand Success**: This mantri is performing well. Consider replicating their strategies.")
|
| 218 |
+
|
| 219 |
+
# Village Analysis
|
| 220 |
+
elif section == "Village Analysis":
|
| 221 |
+
st.header("Village Performance Analysis")
|
| 222 |
+
|
| 223 |
+
selected_village = st.selectbox("Select Village", village_performance['Village'].unique())
|
| 224 |
+
village_data = village_performance[village_performance['Village'] == selected_village].iloc[0]
|
| 225 |
+
|
| 226 |
+
col1, col2, col3, col4 = st.columns(4)
|
| 227 |
+
|
| 228 |
+
with col1:
|
| 229 |
+
st.metric("Village", village_data['Village'])
|
| 230 |
+
with col2:
|
| 231 |
+
st.metric("Mantri", village_data['Mantri_Name'])
|
| 232 |
+
with col3:
|
| 233 |
+
st.metric("Total Sales (L)", village_data['Total_Sales'])
|
| 234 |
+
with col4:
|
| 235 |
+
st.metric("Days Since Last Sale", village_data['Days_Since_Last_Sale'])
|
| 236 |
+
|
| 237 |
+
st.subheader("Village Details")
|
| 238 |
+
st.dataframe(village_data)
|
| 239 |
+
|
| 240 |
+
st.subheader("Action Recommendations")
|
| 241 |
+
if village_data['Days_Since_Last_Sale'] > 30:
|
| 242 |
+
st.error(f"**Send Marketing Team**: No sales in {village_data['Days_Since_Last_Sale']} days. Need immediate attention.")
|
| 243 |
+
if village_data['Conversion_Rate'] < 25:
|
| 244 |
+
st.warning(f"**Low Conversion**: Only {village_data['Conversion_Rate']}% of potential customers are converted.")
|
| 245 |
+
if village_data['Total_Sales'] > 100:
|
| 246 |
+
st.success(f"**High Performer**: This village has high sales volume. Consider expanding product range.")
|
| 247 |
+
|
| 248 |
+
# Message Center
|
| 249 |
+
elif section == "Message Center":
|
| 250 |
+
st.header("Message Center")
|
| 251 |
+
|
| 252 |
+
st.subheader("Mantri Communication")
|
| 253 |
+
selected_mantri = st.selectbox("Select Mantri", mantri_performance['MANTRY_NAME'].unique())
|
| 254 |
+
mantri_data = mantri_performance[mantri_performance['MANTRY_NAME'] == selected_mantri].iloc[0]
|
| 255 |
+
|
| 256 |
+
st.write(f"**Village:** {mantri_data['VILLAGE']}")
|
| 257 |
+
st.write(f"**Conversion Rate:** {mantri_data['Conversion_Rate']}%")
|
| 258 |
+
st.write(f"**Untapped Potential:** {int(mantri_data['Untapped_Potential'])} farmers")
|
| 259 |
+
|
| 260 |
+
if mantri_data['Conversion_Rate'] < 20:
|
| 261 |
+
reason = "Low Conversion"
|
| 262 |
+
elif mantri_data['Untapped_Potential'] > 10:
|
| 263 |
+
reason = "High Potential"
|
| 264 |
+
else:
|
| 265 |
+
reason = "Good Performance"
|
| 266 |
+
|
| 267 |
+
message = get_mantri_message_template(
|
| 268 |
+
mantri_data['MANTRY_NAME'],
|
| 269 |
+
mantri_data['VILLAGE'],
|
| 270 |
+
reason,
|
| 271 |
+
mantri_data
|
| 272 |
+
)
|
| 273 |
+
|
| 274 |
+
st.text_area("Generated Message", message, height=200)
|
| 275 |
+
|
| 276 |
+
if st.button("Send to Mantri"):
|
| 277 |
+
st.success(f"Message sent to {mantri_data['MANTRY_NAME']} at {mantri_data['MOBILE_NO']}")
|
| 278 |
+
# Here you would integrate with WhatsApp API
|
| 279 |
+
|
| 280 |
+
st.subheader("Bulk Message Sender")
|
| 281 |
+
st.write("Send messages to multiple mantris at once")
|
| 282 |
+
|
| 283 |
+
options = st.multiselect("Select Mantris", mantri_performance['MANTRY_NAME'].unique())
|
| 284 |
+
message_template = st.text_area("Message Template", height=100)
|
| 285 |
+
|
| 286 |
+
if st.button("Send to Selected Mantris"):
|
| 287 |
+
progress_bar = st.progress(0)
|
| 288 |
+
for i, mantri in enumerate(options):
|
| 289 |
+
# Simulate sending
|
| 290 |
+
time.sleep(0.5)
|
| 291 |
+
progress_bar.progress((i + 1) / len(options))
|
| 292 |
+
st.success(f"Messages sent to {len(options)} mantris")
|
| 293 |
+
|
| 294 |
+
# Team Dispatch
|
| 295 |
+
elif section == "Team Dispatch":
|
| 296 |
+
st.header("Marketing Team Dispatch Planner")
|
| 297 |
+
|
| 298 |
+
st.subheader("Villages Needing Immediate Attention")
|
| 299 |
+
|
| 300 |
+
# Find villages with no recent sales or low conversion
|
| 301 |
+
high_priority = village_performance[
|
| 302 |
+
(village_performance['Days_Since_Last_Sale'] > 30) |
|
| 303 |
+
(village_performance['Conversion_Rate'] < 20)
|
| 304 |
+
]
|
| 305 |
+
|
| 306 |
+
if not high_priority.empty:
|
| 307 |
+
for _, village in high_priority.iterrows():
|
| 308 |
+
with st.expander(f"{village['Village']} (Last sale: {village['Days_Since_Last_Sale']} days ago)"):
|
| 309 |
+
st.write(f"**Mantri:** {village['Mantri_Name']} ({village['Mantri_Mobile']})")
|
| 310 |
+
st.write(f"**Conversion Rate:** {village['Conversion_Rate']}%")
|
| 311 |
+
st.write(f"**Recommended Action:** Conduct demo sessions and awareness campaign")
|
| 312 |
+
|
| 313 |
+
if st.button(f"Dispatch Team to {village['Village']}", key=f"dispatch_{village['Village']}"):
|
| 314 |
+
st.success(f"Team dispatched to {village['Village']}. Mantri {village['Mantri_Name']} has been notified.")
|
| 315 |
+
else:
|
| 316 |
+
st.info("No villages currently require immediate team dispatch.")
|
| 317 |
+
|
| 318 |
+
st.subheader("Create New Dispatch Plan")
|
| 319 |
+
|
| 320 |
+
col1, col2 = st.columns(2)
|
| 321 |
+
|
| 322 |
+
with col1:
|
| 323 |
+
selected_village = st.selectbox("Select Village for Dispatch", village_performance['Village'].unique())
|
| 324 |
+
village_data = village_performance[village_performance['Village'] == selected_village].iloc[0]
|
| 325 |
+
|
| 326 |
+
st.write(f"**Mantri:** {village_data['Mantri_Name']}")
|
| 327 |
+
st.write(f"**Last Sale:** {village_data['Days_Since_Last_Sale']} days ago")
|
| 328 |
+
st.write(f"**Conversion Rate:** {village_data['Conversion_Rate']}%")
|
| 329 |
+
|
| 330 |
+
with col2:
|
| 331 |
+
dispatch_date = st.date_input("Dispatch Date", datetime.now() + timedelta(days=1))
|
| 332 |
+
team_size = st.slider("Team Size", 1, 5, 2)
|
| 333 |
+
duration = st.selectbox("Duration", ["1 day", "2 days", "3 days", "1 week"])
|
| 334 |
+
|
| 335 |
+
objectives = st.text_area("Objectives", "Conduct demo sessions, educate farmers about benefits, collect feedback")
|
| 336 |
+
|
| 337 |
+
if st.button("Schedule Dispatch"):
|
| 338 |
+
st.success(f"Dispatch to {selected_village} scheduled for {dispatch_date}")
|
| 339 |
+
st.json({
|
| 340 |
+
"village": selected_village,
|
| 341 |
+
"mantri": village_data['Mantri_Name'],
|
| 342 |
+
"date": str(dispatch_date),
|
| 343 |
+
"team_size": team_size,
|
| 344 |
+
"duration": duration,
|
| 345 |
+
"objectives": objectives
|
| 346 |
+
})
|
| 347 |
+
|
| 348 |
+
# Footer
|
| 349 |
+
st.markdown("---")
|
| 350 |
+
st.markdown("**Calcium Supplement Sales Automation System** | For internal use only")
|
__pycache__/analytics.cpython-310.pyc
ADDED
|
Binary file (4.34 kB). View file
|
|
|
__pycache__/analytics.cpython-313.pyc
ADDED
|
Binary file (7.55 kB). View file
|
|
|
__pycache__/config.cpython-313.pyc
ADDED
|
Binary file (809 Bytes). View file
|
|
|
__pycache__/data_processor.cpython-310.pyc
ADDED
|
Binary file (20.7 kB). View file
|
|
|
__pycache__/data_processor.cpython-313.pyc
ADDED
|
Binary file (33.2 kB). View file
|
|
|
__pycache__/database.cpython-310.pyc
ADDED
|
Binary file (30.5 kB). View file
|
|
|
__pycache__/database.cpython-313.pyc
ADDED
|
Binary file (40.2 kB). View file
|
|
|
__pycache__/database_schema.cpython-313.pyc
ADDED
|
Binary file (3.47 kB). View file
|
|
|
__pycache__/excel_exporter.cpython-313.pyc
ADDED
|
Binary file (2.8 kB). View file
|
|
|
__pycache__/main.cpython-313.pyc
ADDED
|
Binary file (2.48 kB). View file
|
|
|
__pycache__/sales_manager.cpython-313.pyc
ADDED
|
Binary file (4.28 kB). View file
|
|
|
__pycache__/whatsapp_manager.cpython-310.pyc
ADDED
|
Binary file (11.2 kB). View file
|
|
|
analytics.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
from datetime import datetime, timedelta
|
| 4 |
+
|
| 5 |
+
class Analytics:
    """Read-only reporting layer on top of the database manager.

    Every public method is defensive: when the underlying tables are empty
    or a query fails, it returns a zeroed summary dict / empty DataFrame
    instead of raising, so dashboard code can always render.
    """

    def __init__(self, db_manager):
        """Keep a reference to the database manager used for all queries."""
        self.db = db_manager

    def get_sales_summary(self):
        """Return overall revenue, collections, pending balance and counts."""
        zeroed = {
            'total_sales': 0,
            'total_payments': 0,
            'pending_amount': 0,
            'total_transactions': 0,
            'avg_sale_value': 0
        }
        try:
            sales = self.db.get_dataframe('sales')
            payments = self.db.get_dataframe('payments')

            if sales.empty:
                return zeroed

            revenue = sales['total_amount'].sum()
            collected = 0 if payments.empty else payments['amount'].sum()

            return {
                'total_sales': revenue,
                'total_payments': collected,
                'pending_amount': revenue - collected,
                'total_transactions': len(sales),
                'avg_sale_value': sales['total_amount'].mean()
            }
        except Exception:
            return zeroed

    def get_customer_analysis(self):
        """Summarise the customer base: size, village spread, top spenders."""
        fallback = {
            'total_customers': 0,
            'village_distribution': {},
            'top_customers': {}
        }
        try:
            customers = self.db.get_dataframe('customers')
            sales = self.db.get_dataframe('sales')

            if customers.empty:
                return fallback

            # Ten most common villages among registered customers.
            by_village = customers['village'].value_counts().head(10)

            # Ten highest-spending customers by lifetime sales value.
            if sales.empty:
                biggest_spenders = pd.Series(dtype=float)
            else:
                biggest_spenders = (
                    sales.groupby('customer_id')['total_amount'].sum().nlargest(10)
                )

            return {
                'total_customers': len(customers),
                'village_distribution': by_village.to_dict(),
                'top_customers': biggest_spenders.to_dict()
            }
        except Exception:
            return fallback

    def get_payment_analysis(self):
        """Break down outstanding balances and how payments were made."""
        fallback = {
            'total_pending': 0,
            'customer_pending': {},
            'payment_methods': {}
        }
        try:
            pending = self.db.get_pending_payments()
            payments = self.db.get_dataframe('payments')

            if pending.empty:
                return fallback

            # Outstanding amount owed per customer.
            owed_per_customer = pending.groupby('customer_id')['pending_amount'].sum()

            # How the collected money arrived (cash / UPI / ...).
            if payments.empty:
                method_counts = pd.Series(dtype=object)
            else:
                method_counts = payments['payment_method'].value_counts()

            return {
                'total_pending': pending['pending_amount'].sum(),
                'customer_pending': owed_per_customer.to_dict(),
                'payment_methods': method_counts.to_dict()
            }
        except Exception:
            return fallback

    def get_demo_conversion_rates(self):
        """Report how many product demos turned into sales (percentage)."""
        fallback = {
            'total_demos': 0,
            'converted_demos': 0,
            'conversion_rate': 0
        }
        try:
            demos = self.db.get_demo_conversions()

            if demos.empty:
                return fallback

            held = len(demos)
            won = int((demos['conversion_status'] == 'Converted').sum())
            rate = (won / held) * 100 if held > 0 else 0

            return {
                'total_demos': held,
                'converted_demos': won,
                'conversion_rate': rate
            }
        except Exception:
            return fallback

    def get_sales_trend(self):
        """Daily sales totals sorted by date, ready for a line chart."""
        try:
            sales = self.db.get_dataframe('sales')

            if sales.empty:
                return pd.DataFrame()

            # Normalise the date column before aggregating per day.
            sales['sale_date'] = pd.to_datetime(sales['sale_date'])

            per_day = sales.groupby('sale_date')['total_amount'].sum().reset_index()
            return per_day.sort_values('sale_date')
        except Exception:
            return pd.DataFrame()

    def get_payment_distribution(self):
        """Total amount collected per payment method, for a pie/bar chart."""
        try:
            payments = self.db.get_dataframe('payments')

            if payments.empty:
                return pd.DataFrame()

            return payments.groupby('payment_method')['amount'].sum().reset_index()
        except Exception:
            return pd.DataFrame()

    def get_product_performance(self):
        """Units sold and revenue per product, joined with product names."""
        try:
            items = self.db.get_dataframe('sale_items', '''
                SELECT si.*, p.product_name
                FROM sale_items si
                JOIN products p ON si.product_id = p.product_id
            ''')

            if items.empty:
                return pd.DataFrame()

            return (
                items.groupby('product_name')
                .agg({'quantity': 'sum', 'amount': 'sum'})
                .reset_index()
            )
        except Exception:
            return pd.DataFrame()
|
automation.py
ADDED
|
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# enhanced_automation.py
import os
import smtplib
import time
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText

import schedule

from analytics import Analytics
|
| 8 |
+
|
| 9 |
+
class AutomationManager:
    """Scheduled outreach jobs: payment reminders, demo follow-ups and a
    weekly performance report, delivered through the WhatsApp manager.

    Parameters:
        db_manager: project database wrapper exposing get_pending_payments()
            and get_dataframe(table, query).
        whatsapp_manager: object exposing send_message(mobile, text).
    """

    def __init__(self, db_manager, whatsapp_manager):
        self.db = db_manager
        self.whatsapp = whatsapp_manager

    def daily_payment_reminders(self):
        """Send payment reminders for overdue payments.

        One WhatsApp message per pending row with a positive balance whose
        customer record can be found.
        """
        overdue_payments = self.db.get_pending_payments()

        for _, payment in overdue_payments.iterrows():
            # Fully settled invoices need no reminder.
            if payment['pending_amount'] <= 0:
                continue

            # NOTE(review): customer_id is interpolated straight into SQL.
            # Assumed to be a trusted integer from our own sales table —
            # switch to a parameterised query if the DB layer supports one.
            customer = self.db.get_dataframe(
                'customers',
                f"SELECT * FROM customers WHERE customer_id = {payment['customer_id']}")

            if customer.empty:
                continue

            customer_data = customer.iloc[0]
            message = f"""Hello {customer_data['name']},

This is a friendly reminder that your payment of βΉ{payment['pending_amount']:,.2f} for invoice {payment['invoice_no']} is overdue.

Please make the payment at your earliest convenience.

Thank you,
Sales Team"""

            self.whatsapp.send_message(customer_data['mobile'], message)

    def demo_followups(self):
        """Send follow-up messages for demos whose follow-up date is today
        and which have not converted yet."""
        upcoming_followups = self.db.get_dataframe('demos', '''
            SELECT d.*, c.name as customer_name, c.mobile, p.product_name
            FROM demos d
            JOIN customers c ON d.customer_id = c.customer_id
            JOIN products p ON d.product_id = p.product_id
            WHERE d.follow_up_date = date('now')
            AND d.conversion_status = 'Not Converted'
        ''')

        for _, demo in upcoming_followups.iterrows():
            message = f"""Hello {demo['customer_name']},

Following up on your demo of {demo['product_name']} on {demo['demo_date']}.

How was your experience? Would you like to place an order or need another demo?

Best regards,
Sales Team"""

            self.whatsapp.send_message(demo['mobile'], message)

    def weekly_performance_report(self):
        """Generate the weekly KPI summary, persist it and return the text."""
        # Imported here (in addition to the module header) so this method
        # works even if the class is used in isolation.
        from analytics import Analytics

        analytics = Analytics(self.db)

        sales_summary = analytics.get_sales_summary()
        demo_stats = analytics.get_demo_conversion_rates()
        payment_analysis = analytics.get_payment_analysis()  # noqa: F841 - kept for parity / future use

        report = f"""
π WEEKLY PERFORMANCE REPORT
----------------------------
Total Sales: βΉ{sales_summary.get('total_sales', 0):,.2f}
Pending Payments: βΉ{sales_summary.get('pending_amount', 0):,.2f}
Demo Conversion Rate: {demo_stats.get('conversion_rate', 0):.1f}%
Total Customers: {analytics.get_customer_analysis().get('total_customers', 0)}

Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M')}
"""

        # You can extend this to email the report.
        self._save_report(report)
        return report

    def _save_report(self, report):
        """Write the report under reports/, creating the directory if needed."""
        import os  # local import: keeps the method usable standalone

        filename = f"reports/weekly_report_{datetime.now().strftime('%Y%m%d')}.txt"
        os.makedirs('reports', exist_ok=True)

        # utf-8 explicitly: the report contains non-ASCII symbols that would
        # fail to encode on platforms whose default encoding is not UTF-8.
        with open(filename, 'w', encoding='utf-8') as f:
            f.write(report)
|
components/__init__.py
ADDED
|
File without changes
|
components/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (143 Bytes). View file
|
|
|
components/__pycache__/database_status.cpython-310.pyc
ADDED
|
Binary file (985 Bytes). View file
|
|
|
components/database_status.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# components/database_status.py
|
| 2 |
+
import streamlit as st
|
| 3 |
+
|
| 4 |
+
def show_database_status(db):
    """Render a sidebar panel with row counts for each core table.

    Shows an error message when no database is supplied or a table
    read fails.
    """
    panel = st.sidebar
    panel.markdown("---")
    panel.subheader("π Database Status")

    try:
        if db:
            # Count everything first, render after: if any table read fails
            # we fall through to the error message without a half-drawn panel.
            counts = [
                ("π₯ Customers", len(db.get_dataframe('customers'))),
                ("π° Sales", len(db.get_dataframe('sales'))),
                ("π€ Distributors", len(db.get_dataframe('distributors'))),
                ("π³ Payments", len(db.get_dataframe('payments'))),
                ("π¦ Products", len(db.get_dataframe('products'))),
            ]
            for label, value in counts:
                panel.metric(label, value)
        else:
            panel.error("Database not available")

    except Exception:
        panel.error("Database connection issue")
|
data/AMBERAVPURA ENGLISH SABHASAD LIST.xlsx
ADDED
|
Binary file (14 kB). View file
|
|
|
data/APRIL 24-25.xlsx
ADDED
|
Binary file (31.2 kB). View file
|
|
|
data/AUGUST 24-25.xlsx
ADDED
|
Binary file (33.1 kB). View file
|
|
|
data/JULY 24-25.xlsx
ADDED
|
Binary file (35.3 kB). View file
|
|
|
data/JUNE 24-25.xlsx
ADDED
|
Binary file (30 kB). View file
|
|
|
data/MAY 24-25.xlsx
ADDED
|
Binary file (29.3 kB). View file
|
|
|
data/SEPTEMBER 24-25.xlsx
ADDED
|
Binary file (42.1 kB). View file
|
|
|
data/amiyad.xlsx
ADDED
|
Binary file (41.6 kB). View file
|
|
|
data/dharkhuniya.xlsx
ADDED
|
Binary file (37.2 kB). View file
|
|
|
data/distributors.xlsx
ADDED
|
Binary file (26.3 kB). View file
|
|
|
data/kamrol.xlsx
ADDED
|
Binary file (39 kB). View file
|
|
|
data/sandha.xlsx
ADDED
|
Binary file (37.5 kB). View file
|
|
|
data/vishnoli.xlsx
ADDED
|
Binary file (38.2 kB). View file
|
|
|
data_processor.py
ADDED
|
@@ -0,0 +1,710 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
import os
|
| 4 |
+
import re
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
import logging
|
| 7 |
+
|
| 8 |
+
# Set up logging
# NOTE: basicConfig at import time configures the root logger as a side
# effect of importing this module; INFO keeps the per-sheet import
# progress and row-level warnings visible.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
| 11 |
+
|
| 12 |
+
class DataProcessor:
|
| 13 |
+
def __init__(self, db_manager):
    """Store the database manager and pre-build the product lookup table."""
    # Project database wrapper (get_dataframe, add_sale, add_customer, ...).
    self.db = db_manager
    # Cache of UPPER-CASED product name -> product_id, built once from the
    # products table (see _create_product_mapping).
    self.product_mapping = self._create_product_mapping()
|
| 16 |
+
|
| 17 |
+
def _create_product_mapping(self):
|
| 18 |
+
"""Create product mapping from database"""
|
| 19 |
+
try:
|
| 20 |
+
products_df = self.db.get_dataframe('products')
|
| 21 |
+
return {row['product_name'].upper(): row['product_id'] for _, row in products_df.iterrows()}
|
| 22 |
+
except Exception as e:
|
| 23 |
+
logger.error(f"Error creating product mapping: {e}")
|
| 24 |
+
return {}
|
| 25 |
+
|
| 26 |
+
def process_excel_file(self, file_path):
    """Enhanced file processing with all data types.

    Opens the workbook at *file_path*, classifies every sheet as
    payment / sales / distributor / customer data using the _is_*
    heuristics, and routes each sheet to the matching process_* handler.

    Returns True when at least one sheet was processed successfully,
    False otherwise (including on any top-level error).
    """
    try:
        file_name = os.path.basename(file_path)
        print(f"π Processing file: {file_name}")

        excel_file = pd.ExcelFile(file_path)
        processed_sheets = 0  # sheets that were both recognised and imported

        for sheet_name in excel_file.sheet_names:
            df = pd.read_excel(file_path, sheet_name=sheet_name)
            df_clean = self._clean_dataframe(df)

            print(f"\nπ Sheet: {sheet_name}")
            print(f" Columns: {df_clean.columns.tolist()}")

            # Check all types with priority: when a sheet matches several
            # heuristics, payment wins over sales, sales over distributor,
            # distributor over customer (see the elif chain below).
            is_payment = self._is_payment_sheet(df_clean)
            is_sales = self._is_sales_sheet(df_clean)
            is_customer = self._is_customer_sheet(df_clean)
            is_distributor = self._is_distributor_sheet(df_clean)

            print(f" Detection - Payment: {is_payment}, Sales: {is_sales}, Customer: {is_customer}, Distributor: {is_distributor}")

            processed = False
            if is_payment:
                processed = self.process_payment_sheet(df_clean, file_name, sheet_name)
            elif is_sales:
                processed = self.process_sales_sheet(df_clean, file_name, sheet_name)
            elif is_distributor:
                processed = self.process_distributor_sheet(df_clean, file_name, sheet_name)
            elif is_customer:
                processed = self.process_customer_sheet(df_clean, file_name, sheet_name)

            if processed:
                processed_sheets += 1
                print(f" β Successfully processed as detected type")
            else:
                print(f" β Failed to process")

        print(f"\nπ File processing complete: {processed_sheets}/{len(excel_file.sheet_names)} sheets processed")
        return processed_sheets > 0

    except Exception as e:
        print(f"π₯ Error processing file {file_path}: {e}")
        return False
|
| 72 |
+
|
| 73 |
+
def _clean_dataframe(self, df):
|
| 74 |
+
"""Clean and prepare dataframe for processing"""
|
| 75 |
+
# Remove completely empty rows and columns
|
| 76 |
+
df = df.dropna(how='all').dropna(axis=1, how='all')
|
| 77 |
+
|
| 78 |
+
# Reset index
|
| 79 |
+
df = df.reset_index(drop=True)
|
| 80 |
+
|
| 81 |
+
# Convert column names to string and clean them
|
| 82 |
+
df.columns = [str(col).strip().upper() for col in df.columns]
|
| 83 |
+
|
| 84 |
+
return df
|
| 85 |
+
|
| 86 |
+
def _is_sales_sheet(self, df):
|
| 87 |
+
"""Check if sheet contains sales data"""
|
| 88 |
+
required_columns = ['INVOICE', 'CUSTOMER', 'PRODUCT', 'QUANTITY', 'AMOUNT']
|
| 89 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 90 |
+
return len(existing_columns) >= 3
|
| 91 |
+
|
| 92 |
+
def _is_customer_sheet(self, df):
|
| 93 |
+
"""Check if sheet contains customer data"""
|
| 94 |
+
required_columns = ['CUSTOMER', 'NAME', 'MOBILE', 'VILLAGE']
|
| 95 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 96 |
+
return len(existing_columns) >= 2
|
| 97 |
+
|
| 98 |
+
def _is_distributor_sheet(self, df):
|
| 99 |
+
"""Check if sheet contains distributor data"""
|
| 100 |
+
required_columns = ['DISTRIBUTOR', 'MANTRI', 'SABHASAD']
|
| 101 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 102 |
+
return len(existing_columns) >= 2
|
| 103 |
+
|
| 104 |
+
def process_sales_sheet(self, df, file_name, sheet_name):
    """Import sales rows from a sheet.

    Columns are read positionally: invoice, customer, product, quantity,
    amount.  Rows are persisted only when customer and product resolve
    and the quantity is positive.  Returns True if any row was saved.
    """
    try:
        imported = 0

        for idx, record in df.iterrows():
            try:
                # Header rows and rows with an empty first cell carry no sale.
                if self._is_header_row(record) or pd.isna(record.iloc[0]):
                    continue

                width = len(record)

                # Positional extraction (adjust indices to the workbook layout).
                invoice_no = str(record.iloc[0]) if width > 0 else f"INV_{datetime.now().strftime('%Y%m%d%H%M%S')}_{idx}"
                buyer = str(record.iloc[1]) if width > 1 else "Unknown Customer"
                item_name = str(record.iloc[2]) if width > 2 else "Unknown Product"
                qty = self._safe_float(record.iloc[3]) if width > 3 else 0
                total = self._safe_float(record.iloc[4]) if width > 4 else 0

                # Resolve (or create) the customer and look up the product.
                buyer_id = self._get_or_create_customer(buyer, "", "", "", "")
                item_id = self._get_product_id(item_name)

                if buyer_id and item_id and qty > 0:
                    line_items = [{
                        'product_id': item_id,
                        'quantity': qty,
                        'rate': total / qty if qty > 0 else 0,
                    }]
                    self.db.add_sale(invoice_no, buyer_id, datetime.now().date(), line_items)
                    imported += 1

            except Exception as e:
                logger.warning(f"Error processing row {idx} in sales sheet: {e}")
                continue

        logger.info(f"Processed {imported} sales from {sheet_name}")
        return imported > 0

    except Exception as e:
        logger.error(f"Error processing sales sheet: {e}")
        return False
|
| 150 |
+
|
| 151 |
+
def process_customer_sheet(self, df, file_name, sheet_name):
    """Process customer data from sheet with duplicate handling.

    Expects positional columns: code, name, mobile, village, taluka,
    district (missing cells are tolerated).  Returns True when at least
    one new customer row was inserted.
    """
    try:
        processed_rows = 0
        duplicate_rows = 0
        error_rows = 0

        print(f"π Processing customer sheet: {sheet_name} with {len(df)} rows")

        for index, row in df.iterrows():
            try:
                # Skip header rows and empty rows
                if self._is_header_row(row) or pd.isna(row.iloc[0]):
                    continue

                # Extract customer data — positional; the column order is
                # assumed fixed. TODO confirm against the source workbooks.
                customer_code = str(row.iloc[0]) if len(row) > 0 and pd.notna(row.iloc[0]) else None
                name = str(row.iloc[1]) if len(row) > 1 and pd.notna(row.iloc[1]) else "Unknown"
                mobile = str(row.iloc[2]) if len(row) > 2 and pd.notna(row.iloc[2]) else ""

                # Extract location - adjust indices based on your Excel structure
                village = str(row.iloc[3]) if len(row) > 3 and pd.notna(row.iloc[3]) else ""
                taluka = str(row.iloc[4]) if len(row) > 4 and pd.notna(row.iloc[4]) else ""
                district = str(row.iloc[5]) if len(row) > 5 and pd.notna(row.iloc[5]) else ""

                # If village is combined with name (e.g. "NAME (VILLAGE)"), split them
                if not village and "(" in name:
                    name_parts = name.split("(")
                    if len(name_parts) > 1:
                        name = name_parts[0].strip()
                        village = name_parts[1].replace(")", "").strip()

                # Skip if no name
                if not name or name == "Unknown":
                    continue

                # Add customer to database (method now handles duplicates)
                customer_id = self.db.add_customer(name, mobile, village, taluka, district, customer_code)

                # A falsy id or -1 is treated here as "already present".
                if customer_id and customer_id != -1:
                    processed_rows += 1
                    if processed_rows % 50 == 0:  # Progress update
                        print(f"π Processed {processed_rows} customers...")
                else:
                    duplicate_rows += 1

            except Exception as e:
                error_rows += 1
                if error_rows <= 5:  # Only log first few errors
                    print(f"β Error in row {index}: {e}")
                continue

        print(f"π Customer processing complete: {processed_rows} added, {duplicate_rows} duplicates, {error_rows} errors")
        return processed_rows > 0

    except Exception as e:
        print(f"π₯ Error processing customer sheet: {e}")
        return False
|
| 209 |
+
|
| 210 |
+
def process_distributor_sheet(self, df, file_name, sheet_name):
    """Import distributor rows from one worksheet into the distributors table.

    Rows are upserted (INSERT OR REPLACE) keyed by the full column tuple;
    rows missing village or taluka are skipped. Per-row failures are logged
    and do not abort the sheet.

    Args:
        df: Worksheet as a DataFrame.
        file_name: Source workbook name (logging only).
        sheet_name: Worksheet name (logging only).

    Returns:
        bool: True if at least one distributor row was written.
    """
    try:
        processed_rows = 0

        # Normalise headers once so name-based lookups are predictable.
        df.columns = [str(col).strip().upper() for col in df.columns]
        print(f"DEBUG: Processing distributor sheet with columns: {df.columns.tolist()}")

        for index, row in df.iterrows():
            try:
                # Skip header rows and empty rows
                if self._is_header_row(row) or pd.isna(row.iloc[0]):
                    print(f"DEBUG: Skipping row {index} - header or empty")
                    continue

                print(f"DEBUG: Processing row {index}")

                # BUG FIX: headers were upper-cased above, but lookups used
                # mixed-case names ('Village', 'Taluka', ...) which never
                # matched, so every value silently fell back to a positional
                # guess. Use the upper-cased names.
                name = self._extract_distributor_name(row)
                village = self._safe_get(row, 'VILLAGE', 1)
                taluka = self._safe_get(row, 'TALUKA', 2)
                district = self._safe_get(row, 'DISTRICT', 3)
                mantri_name = self._safe_get(row, 'MANTRI_NAME', 4)
                mantri_mobile = self._safe_get(row, 'MANTRI_MOBILE', 5)
                sabhasad_count = self._safe_get_int(row, 'SABHASAD', 6)
                contact_in_group = self._safe_get_int(row, 'CONTACT_IN_GROUP', 7)

                print(f"DEBUG: Extracted - Village: {village}, Taluka: {taluka}, Mantri: {mantri_name}")

                # Validate we have essential data
                if not village or not taluka:
                    print(f"DEBUG: Skipping - missing village or taluka")
                    continue

                # Create distributor name from village + taluka
                if not name:
                    name = f"{village} - {taluka}"

                # Add distributor to database with ALL fields
                self.db.execute_query('''
                    INSERT OR REPLACE INTO distributors
                    (name, village, taluka, district, mantri_name, mantri_mobile, sabhasad_count, contact_in_group)
                    VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                ''', (name, village, taluka, district, mantri_name, mantri_mobile, sabhasad_count, contact_in_group))

                processed_rows += 1
                print(f"DEBUG: Successfully added distributor: {name}")

            except Exception as e:
                logger.warning(f"Error processing row {index} in distributor sheet: {e}")
                continue

        logger.info(f"Processed {processed_rows} distributors from {sheet_name}")
        return processed_rows > 0

    except Exception as e:
        logger.error(f"Error processing distributor sheet: {e}")
        return False
|
| 270 |
+
|
| 271 |
+
def _extract_distributor_name(self, row):
    """Build a distributor display name from the row's village and taluka.

    Returns "<village> - <taluka>" when both exist, a single value when only
    one exists, or "Unknown Distributor" when neither does.

    BUG FIX: lookups used mixed-case column names ('Village'), but this is
    called from process_distributor_sheet, which upper-cases every header
    first — so the name match never succeeded and values came only from the
    positional fallback. Use the upper-cased names.
    """
    village = self._safe_get(row, 'VILLAGE', 1)
    taluka = self._safe_get(row, 'TALUKA', 2)

    if village and taluka:
        return f"{village} - {taluka}"
    if village:
        return village
    if taluka:
        return taluka
    return "Unknown Distributor"
|
| 284 |
+
|
| 285 |
+
def _safe_get(self, row, column_name, default_index):
|
| 286 |
+
"""Safely get value from row by column name or index"""
|
| 287 |
+
try:
|
| 288 |
+
# Try by column name first
|
| 289 |
+
if column_name in row.index:
|
| 290 |
+
value = row[column_name]
|
| 291 |
+
if pd.isna(value):
|
| 292 |
+
return ""
|
| 293 |
+
return str(value).strip()
|
| 294 |
+
|
| 295 |
+
# Fallback to index
|
| 296 |
+
if len(row) > default_index:
|
| 297 |
+
value = row.iloc[default_index]
|
| 298 |
+
if pd.isna(value):
|
| 299 |
+
return ""
|
| 300 |
+
return str(value).strip()
|
| 301 |
+
|
| 302 |
+
return ""
|
| 303 |
+
except Exception:
|
| 304 |
+
return ""
|
| 305 |
+
|
| 306 |
+
def _safe_get_int(self, row, column_name, default_index):
    """Fetch a cell via _safe_get and coerce it to int; anything unusable yields 0.

    Goes through float() first so values like "12.0" from Excel still parse.
    """
    raw = self._safe_get(row, column_name, default_index)
    try:
        return int(float(raw)) if raw and raw.strip() else 0
    except (ValueError, TypeError):
        return 0
|
| 315 |
+
|
| 316 |
+
def _is_header_row(self, row):
|
| 317 |
+
"""Check if row is a header row - updated for your data"""
|
| 318 |
+
if len(row) == 0:
|
| 319 |
+
return True
|
| 320 |
+
|
| 321 |
+
first_value = str(row.iloc[0]) if pd.notna(row.iloc[0]) else ""
|
| 322 |
+
first_value_upper = first_value.upper()
|
| 323 |
+
|
| 324 |
+
# Header indicators for YOUR data
|
| 325 |
+
header_indicators = [
|
| 326 |
+
'DATE', 'VILLAGE', 'TALUKA', 'DISTRICT', 'MANTRI',
|
| 327 |
+
'SABHASAD', 'CONTACT', 'TOTAL', 'SR', 'NO', 'NAME'
|
| 328 |
+
]
|
| 329 |
+
|
| 330 |
+
# If first value contains any header indicator, it's likely a header
|
| 331 |
+
return any(indicator in first_value_upper for indicator in header_indicators)
|
| 332 |
+
|
| 333 |
+
def _safe_float(self, value):
|
| 334 |
+
"""Safely convert value to float"""
|
| 335 |
+
try:
|
| 336 |
+
if pd.isna(value):
|
| 337 |
+
return 0.0
|
| 338 |
+
return float(value)
|
| 339 |
+
except (ValueError, TypeError):
|
| 340 |
+
return 0.0
|
| 341 |
+
|
| 342 |
+
def _get_or_create_customer(self, name, mobile, village, taluka, district):
    """Return the customer_id for (name, mobile), creating the customer if needed.

    Returns:
        The customer_id, or None on any database error.
    """
    try:
        # Check if customer exists
        result = self.db.execute_query(
            'SELECT customer_id FROM customers WHERE name = ? AND mobile = ?',
            (name, mobile)
        )
        if result:
            return result[0][0]

        # BUG FIX: the code previously used second granularity
        # ('%Y%m%d%H%M%S'), so two customers created within the same second
        # collided on the UNIQUE customer_code. Include microseconds.
        customer_code = f"CUST_{datetime.now().strftime('%Y%m%d%H%M%S%f')}"
        customer_id = self.db.add_customer(name, mobile, village, taluka, district, customer_code)

        # add_customer returns the new id (or -1 on failure) elsewhere in
        # this codebase; trust it when valid instead of always re-querying.
        if customer_id and customer_id != -1:
            return customer_id

        # Fallback: look the new customer up by its unique code.
        result = self.db.execute_query(
            'SELECT customer_id FROM customers WHERE customer_code = ?',
            (customer_code,)
        )
        return result[0][0] if result else None

    except Exception as e:
        logger.error(f"Error getting/creating customer: {e}")
        return None
|
| 368 |
+
|
| 369 |
+
def _get_product_id(self, product_name):
|
| 370 |
+
"""Get product ID from product name"""
|
| 371 |
+
clean_name = product_name.upper().strip()
|
| 372 |
+
return self.product_mapping.get(clean_name, None)
|
| 373 |
+
|
| 374 |
+
def _extract_location_from_name(self, name):
|
| 375 |
+
"""Extract village and taluka from customer name"""
|
| 376 |
+
name_upper = name.upper()
|
| 377 |
+
|
| 378 |
+
locations = {
|
| 379 |
+
'AMIYAD': ('Amiyad', ''),
|
| 380 |
+
'AMVAD': ('Amvad', ''),
|
| 381 |
+
'ANKALAV': ('', 'Ankalav'),
|
| 382 |
+
'PETLAD': ('', 'Petlad'),
|
| 383 |
+
'BORSAD': ('', 'Borsad'),
|
| 384 |
+
'VADODARA': ('', 'Vadodara'),
|
| 385 |
+
'ANAND': ('', 'Anand'),
|
| 386 |
+
'NADIAD': ('', 'Nadiad')
|
| 387 |
+
}
|
| 388 |
+
|
| 389 |
+
village, taluka = "", ""
|
| 390 |
+
for location, (v, t) in locations.items():
|
| 391 |
+
if location in name_upper:
|
| 392 |
+
if v:
|
| 393 |
+
village = v
|
| 394 |
+
if t:
|
| 395 |
+
taluka = t
|
| 396 |
+
break
|
| 397 |
+
|
| 398 |
+
return village, taluka
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
# Add to DataProcessor class in data_processor.py
|
| 403 |
+
|
| 404 |
+
def _is_sales_sheet(self, df):
|
| 405 |
+
"""Enhanced sales sheet detection with better logging"""
|
| 406 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 407 |
+
|
| 408 |
+
print(f"\nπ ENHANCED SALES DETECTION:")
|
| 409 |
+
print(f" All columns: {columns_lower}")
|
| 410 |
+
|
| 411 |
+
sales_indicators = [
|
| 412 |
+
'invoice', 'sale', 'amount', 'product', 'quantity', 'rate',
|
| 413 |
+
'total', 'price', 'bill', 'payment', 'item', 'qty'
|
| 414 |
+
]
|
| 415 |
+
|
| 416 |
+
found_indicators = []
|
| 417 |
+
for indicator in sales_indicators:
|
| 418 |
+
matching_cols = [col for col in columns_lower if indicator in col]
|
| 419 |
+
if matching_cols:
|
| 420 |
+
found_indicators.append((indicator, matching_cols))
|
| 421 |
+
|
| 422 |
+
print(f" Found sales indicators: {found_indicators}")
|
| 423 |
+
|
| 424 |
+
score = len(found_indicators)
|
| 425 |
+
print(f" Sales detection score: {score}")
|
| 426 |
+
|
| 427 |
+
return score >= 2
|
| 428 |
+
|
| 429 |
+
def process_sales_sheet(self, df, file_name, sheet_name):
    """Import sales rows from one worksheet into the database.

    For each data row: extracts invoice/customer/product/quantity/amount via
    fuzzy column matching, resolves (or creates) the customer, maps the
    product name to a product_id, then records the sale via self.db.add_sale.
    Rows with missing or invalid essentials are skipped, not treated as errors.

    Args:
        df: Worksheet as a DataFrame (column layout varies between files).
        file_name: Source workbook name (unused here beyond the signature).
        sheet_name: Worksheet name (used for logging).

    Returns:
        bool: True if at least one sale was inserted; False otherwise,
        including on any unexpected top-level failure.
    """
    try:
        processed_rows = 0
        print(f"π Processing sales sheet: {sheet_name} with {len(df)} rows")

        for index, row in df.iterrows():
            try:
                # Skip header rows and empty rows
                if self._is_header_row(row) or pd.isna(row.iloc[0]):
                    continue

                print(f"π§ Processing row {index}")

                # Extract sales data with flexible column mapping.
                # The invoice default embeds a timestamp + row index, which
                # doubles as a marker ("INV_" prefix) for "no real invoice
                # column found" — see the regeneration step below.
                invoice_no = self._extract_sales_value(row, 'invoice', 0, f"INV_{datetime.now().strftime('%Y%m%d%H%M%S')}_{index}")
                customer_name = self._extract_sales_value(row, 'customer', 1, "Unknown Customer")
                product_name = self._extract_sales_value(row, 'product', 2, "Unknown Product")
                quantity = self._safe_float(self._extract_sales_value(row, 'quantity', 3, 0))
                amount = self._safe_float(self._extract_sales_value(row, 'amount', 4, 0))

                print(f" Extracted - Invoice: '{invoice_no}', Customer: '{customer_name}', Product: '{product_name}', Qty: {quantity}, Amount: {amount}")

                # Validate essential data
                if not customer_name or customer_name == "Unknown Customer":
                    print(f" β οΈ Skipping - invalid customer name")
                    continue

                if quantity <= 0:
                    print(f" β οΈ Skipping - invalid quantity: {quantity}")
                    continue

                if amount <= 0:
                    print(f" β οΈ Skipping - invalid amount: {amount}")
                    continue

                # Get or create customer (sales rows carry no location info,
                # so mobile/village/taluka/district are passed as "").
                customer_id = self._get_or_create_customer(customer_name, "", "", "", "")
                if not customer_id:
                    print(f" β οΈ Skipping - could not get/create customer")
                    continue

                # Get product ID; unknown product names are skipped, never guessed.
                product_id = self._get_product_id(product_name)
                if not product_id:
                    print(f" β οΈ Skipping - product not found: '{product_name}'")
                    print(f" Available products: {list(self.product_mapping.keys())}")
                    continue

                # Derive unit rate from the row total (guard kept although
                # quantity <= 0 was already rejected above).
                rate = amount / quantity if quantity > 0 else 0

                # Create sale items
                # NOTE(review): sale_date is "today", not read from the sheet —
                # confirm a date column is genuinely absent from these workbooks.
                sale_date = datetime.now().date()
                sale_items = [{
                    'product_id': product_id,
                    'quantity': quantity,
                    'rate': rate
                }]

                # Replace the synthetic placeholder (or empty value) with a
                # properly generated invoice number.
                if not invoice_no or invoice_no.startswith('INV_'):
                    invoice_no = self.db.generate_invoice_number()

                print(f" Creating sale - Customer ID: {customer_id}, Product ID: {product_id}")

                # Add sale to database
                sale_id = self.db.add_sale(invoice_no, customer_id, sale_date, sale_items)

                if sale_id and sale_id > 0:
                    processed_rows += 1
                    print(f" β Successfully created sale ID: {sale_id}")
                else:
                    print(f" β Failed to create sale")

            except Exception as e:
                print(f" β Error in row {index}: {e}")
                import traceback
                traceback.print_exc()
                continue

        print(f"π Processed {processed_rows} sales from {sheet_name}")
        return processed_rows > 0

    except Exception as e:
        print(f"π₯ Error processing sales sheet: {e}")
        import traceback
        traceback.print_exc()
        return False
|
| 518 |
+
def _extract_sales_value(self, row, field_name, default_index, default_value):
|
| 519 |
+
"""Extract sales values with flexible column matching"""
|
| 520 |
+
# Try to find column by name
|
| 521 |
+
for col_name in row.index:
|
| 522 |
+
if field_name in str(col_name).lower():
|
| 523 |
+
value = row[col_name]
|
| 524 |
+
if pd.notna(value):
|
| 525 |
+
return str(value).strip()
|
| 526 |
+
|
| 527 |
+
# Fallback to index
|
| 528 |
+
if len(row) > default_index:
|
| 529 |
+
value = row.iloc[default_index]
|
| 530 |
+
if pd.notna(value):
|
| 531 |
+
return str(value).strip()
|
| 532 |
+
|
| 533 |
+
return default_value
|
| 534 |
+
|
| 535 |
+
# Add to DataProcessor class
|
| 536 |
+
|
| 537 |
+
def _is_payment_sheet(self, df):
|
| 538 |
+
"""Detect payment sheets"""
|
| 539 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 540 |
+
|
| 541 |
+
payment_indicators = [
|
| 542 |
+
'payment', 'paid', 'amount', 'invoice', 'date', 'method',
|
| 543 |
+
'cash', 'gpay', 'cheque', 'bank', 'rrn', 'reference'
|
| 544 |
+
]
|
| 545 |
+
|
| 546 |
+
score = sum(1 for indicator in payment_indicators
|
| 547 |
+
if any(indicator in col for col in columns_lower))
|
| 548 |
+
|
| 549 |
+
print(f"π Payment detection - Score: {score}, Columns: {columns_lower}")
|
| 550 |
+
return score >= 2
|
| 551 |
+
|
| 552 |
+
def process_payment_sheet(self, df, file_name, sheet_name):
    """Import payment rows from one worksheet.

    Each row is matched to an existing sale via its invoice number; matched
    payments are inserted into the payments table. Rows whose invoice is not
    found, or that lack an invoice/positive amount, are silently skipped.

    Args:
        df: Worksheet as a DataFrame.
        file_name: Source workbook name (unused here beyond the signature).
        sheet_name: Worksheet name (used for logging).

    Returns:
        bool: True if at least one payment was recorded.
    """
    try:
        processed_rows = 0
        print(f"π Processing payment sheet: {sheet_name}")

        for index, row in df.iterrows():
            try:
                if self._is_header_row(row) or pd.isna(row.iloc[0]):
                    continue

                # Extract payment data
                invoice_no = self._extract_sales_value(row, 'invoice', 0, "")
                amount = self._safe_float(self._extract_sales_value(row, 'amount', 1, 0))
                # NOTE(review): the default here is a datetime.date while a
                # matched cell comes back as a stripped string, so
                # payment_date's type is inconsistent row-to-row — confirm the
                # payments.payment_date column copes with both.
                payment_date = self._extract_sales_value(row, 'date', 2, datetime.now().date())
                payment_method = self._extract_sales_value(row, 'method', 3, "Cash")

                if invoice_no and amount > 0:
                    # Find sale by invoice number (log_action=False keeps this
                    # lookup out of the action/audit trail).
                    sale_result = self.db.execute_query(
                        'SELECT sale_id FROM sales WHERE invoice_no = ?',
                        (invoice_no,),
                        log_action=False
                    )

                    if sale_result:
                        sale_id = sale_result[0][0]

                        # Add payment
                        self.db.execute_query('''
                            INSERT INTO payments (sale_id, payment_date, payment_method, amount)
                            VALUES (?, ?, ?, ?)
                        ''', (sale_id, payment_date, payment_method, amount))

                        processed_rows += 1
                        print(f"β Processed payment for invoice {invoice_no}")

            except Exception as e:
                print(f"β Error processing payment row {index}: {e}")
                continue

        print(f"π Processed {processed_rows} payments from {sheet_name}")
        return processed_rows > 0

    except Exception as e:
        print(f"π₯ Error processing payment sheet: {e}")
        return False
|
| 599 |
+
|
| 600 |
+
|
| 601 |
+
def _is_customer_sheet(self, df):
|
| 602 |
+
"""Check if sheet contains customer data - IMPROVED"""
|
| 603 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 604 |
+
|
| 605 |
+
customer_indicators = [
|
| 606 |
+
'customer', 'name', 'mobile', 'phone', 'village', 'taluka',
|
| 607 |
+
'district', 'code', 'contact'
|
| 608 |
+
]
|
| 609 |
+
|
| 610 |
+
score = sum(1 for indicator in customer_indicators
|
| 611 |
+
if any(indicator in col for col in columns_lower))
|
| 612 |
+
|
| 613 |
+
print(f"π Customer sheet detection - Score: {score}, Columns: {columns_lower}")
|
| 614 |
+
return score >= 2
|
| 615 |
+
|
| 616 |
+
def _is_distributor_sheet(self, df):
|
| 617 |
+
"""Enhanced distributor sheet detection with better logging"""
|
| 618 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 619 |
+
|
| 620 |
+
print(f"\nπ ENHANCED DISTRIBUTOR DETECTION:")
|
| 621 |
+
print(f" All columns: {columns_lower}")
|
| 622 |
+
|
| 623 |
+
distributor_indicators = [
|
| 624 |
+
'distributor', 'mantri', 'sabhasad', 'contact_in_group',
|
| 625 |
+
'village', 'taluka', 'district', 'leader', 'team', 'sabh'
|
| 626 |
+
]
|
| 627 |
+
|
| 628 |
+
found_indicators = []
|
| 629 |
+
for indicator in distributor_indicators:
|
| 630 |
+
matching_cols = [col for col in columns_lower if indicator in col]
|
| 631 |
+
if matching_cols:
|
| 632 |
+
found_indicators.append((indicator, matching_cols))
|
| 633 |
+
|
| 634 |
+
print(f" Found indicators: {found_indicators}")
|
| 635 |
+
|
| 636 |
+
score = len(found_indicators)
|
| 637 |
+
print(f" Detection score: {score}")
|
| 638 |
+
|
| 639 |
+
# More flexible detection - lower threshold
|
| 640 |
+
return score >= 1 # Even if we find just one indicator, try processing
|
| 641 |
+
|
| 642 |
+
def process_single_sheet(self, df, sheet_name, file_name):
    """Dispatch one worksheet to the matching processor.

    Priority order: sales, then customer, then distributor; anything
    unrecognised falls back to customer processing.

    Returns:
        bool: whatever the chosen processor returns.
    """
    print(f"π Processing sheet: {sheet_name} from {file_name}")

    if self._is_sales_sheet(df):
        print("β Detected as SALES sheet")
        return self.process_sales_sheet(df, file_name, sheet_name)

    if self._is_customer_sheet(df):
        print("β Detected as CUSTOMER sheet")
        return self.process_customer_sheet(df, file_name, sheet_name)

    if self._is_distributor_sheet(df):
        print("β Detected as DISTRIBUTOR sheet")
        return self.process_distributor_sheet(df, file_name, sheet_name)

    print("β Unknown sheet type - trying customer processing as fallback")
    return self.process_customer_sheet(df, file_name, sheet_name)
|
| 658 |
+
|
| 659 |
+
def process_excel_file(self, file_path):
    """Process every worksheet in an Excel workbook.

    Each sheet is classified and routed with priority
    payment > sales > distributor > customer.

    Args:
        file_path: Path to the .xlsx/.xls workbook.

    Returns:
        bool: True when at least one sheet was processed successfully.
    """
    try:
        file_name = os.path.basename(file_path)
        print(f"π Processing file: {file_name}")

        # FIX: the pd.ExcelFile handle was never closed (file-handle leak on
        # every call), and each sheet was re-read from disk via pd.read_excel.
        # Use the handle as a context manager and parse sheets directly.
        with pd.ExcelFile(file_path) as excel_file:
            sheet_names = excel_file.sheet_names
            processed_sheets = 0

            for sheet_name in sheet_names:
                df = excel_file.parse(sheet_name)
                df_clean = self._clean_dataframe(df)

                print(f"\nπ Sheet: {sheet_name}")
                print(f" Columns: {df_clean.columns.tolist()}")

                # Check all types with priority
                is_payment = self._is_payment_sheet(df_clean)
                is_sales = self._is_sales_sheet(df_clean)
                is_customer = self._is_customer_sheet(df_clean)
                is_distributor = self._is_distributor_sheet(df_clean)

                print(f" Detection - Payment: {is_payment}, Sales: {is_sales}, Customer: {is_customer}, Distributor: {is_distributor}")

                processed = False
                if is_payment:
                    print(" π³ Processing as PAYMENT sheet")
                    processed = self.process_payment_sheet(df_clean, file_name, sheet_name)
                elif is_sales:
                    print(" π° Processing as SALES sheet")
                    processed = self.process_sales_sheet(df_clean, file_name, sheet_name)
                elif is_distributor:
                    print(" π€ Processing as DISTRIBUTOR sheet")
                    processed = self.process_distributor_sheet(df_clean, file_name, sheet_name)
                elif is_customer:
                    print(" π₯ Processing as CUSTOMER sheet")
                    processed = self.process_customer_sheet(df_clean, file_name, sheet_name)
                else:
                    print(" β Unknown sheet type")

                if processed:
                    processed_sheets += 1
                    print(f" β Successfully processed")
                else:
                    print(f" β Failed to process")

        print(f"\nπ File processing complete: {processed_sheets}/{len(sheet_names)} sheets processed")
        return processed_sheets > 0

    except Exception as e:
        print(f"π₯ Error processing file {file_path}: {e}")
        return False
|
database.py
ADDED
|
@@ -0,0 +1,893 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sqlite3
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import os
|
| 4 |
+
import logging
|
| 5 |
+
from datetime import datetime, timedelta
|
| 6 |
+
from typing import List, Dict, Any, Optional
|
| 7 |
+
import random # Add this import
|
| 8 |
+
# Set up logging
|
| 9 |
+
logging.basicConfig(level=logging.INFO)
|
| 10 |
+
logger = logging.getLogger(__name__)
|
| 11 |
+
|
| 12 |
+
class DatabaseManager:
|
| 13 |
+
def __init__(self, db_path="sales_management.db"):
    """Create the manager and ensure the schema exists.

    Args:
        db_path: Path to the SQLite database file (created on first use).
    """
    self.db_path = db_path
    self._is_logging = False  # Guard flag so DB-action logging cannot recurse into itself
    self.init_database()
|
| 17 |
+
|
| 18 |
+
def get_connection(self):
    """Open a sqlite3 connection to self.db_path with named-column row access.

    Returns:
        sqlite3.Connection: connection whose rows behave like sqlite3.Row.

    Raises:
        sqlite3.Error: re-raised (after logging) when the connection fails.
    """
    try:
        connection = sqlite3.connect(self.db_path)
        connection.row_factory = sqlite3.Row  # rows accessible by column name
        return connection
    except sqlite3.Error as exc:
        logger.error(f"Database connection error: {exc}")
        raise
|
| 27 |
+
|
| 28 |
+
def init_database(self):
    """Create every table of the schema if it does not exist yet.

    Runs the full DDL on one connection (committed at the end), then seeds
    default data and creates indexes via separate connections.

    Raises:
        sqlite3.Error: if any CREATE TABLE fails (logged, then re-raised).
    """
    conn = self.get_connection()

    try:
        # Customers table: end buyers, identified by an optional unique code
        conn.execute('''
            CREATE TABLE IF NOT EXISTS customers (
                customer_id INTEGER PRIMARY KEY AUTOINCREMENT,
                customer_code TEXT UNIQUE,
                name TEXT NOT NULL,
                mobile TEXT,
                village TEXT,
                taluka TEXT,
                district TEXT,
                status TEXT DEFAULT 'Active',
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        # Distributors table: village-level distribution points with mantri contact info
        conn.execute('''
            CREATE TABLE IF NOT EXISTS distributors (
                distributor_id INTEGER PRIMARY KEY AUTOINCREMENT,
                name TEXT NOT NULL,
                village TEXT,
                taluka TEXT,
                district TEXT,
                mantri_name TEXT,
                mantri_mobile TEXT,
                sabhasad_count INTEGER DEFAULT 0,
                contact_in_group INTEGER DEFAULT 0,
                status TEXT DEFAULT 'Active',
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        # Products table: catalog with packing/capacity and a standard rate
        conn.execute('''
            CREATE TABLE IF NOT EXISTS products (
                product_id INTEGER PRIMARY KEY AUTOINCREMENT,
                product_name TEXT UNIQUE NOT NULL,
                packing_type TEXT,
                capacity_ltr REAL,
                category TEXT,
                standard_rate REAL,
                is_active INTEGER DEFAULT 1,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        # Sales table: one row per invoice; customer link survives deletes (SET NULL)
        conn.execute('''
            CREATE TABLE IF NOT EXISTS sales (
                sale_id INTEGER PRIMARY KEY AUTOINCREMENT,
                invoice_no TEXT UNIQUE NOT NULL,
                customer_id INTEGER,
                sale_date DATE,
                total_amount REAL DEFAULT 0,
                total_liters REAL DEFAULT 0,
                payment_status TEXT DEFAULT 'Pending',
                notes TEXT,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL
            )
        ''')

        # Sale items table: line items, removed together with their sale (CASCADE)
        conn.execute('''
            CREATE TABLE IF NOT EXISTS sale_items (
                item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                sale_id INTEGER,
                product_id INTEGER,
                quantity INTEGER,
                rate REAL,
                amount REAL,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (sale_id) REFERENCES sales (sale_id) ON DELETE CASCADE,
                FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
            )
        ''')

        # Payments table: removed together with their sale (CASCADE)
        conn.execute('''
            CREATE TABLE IF NOT EXISTS payments (
                payment_id INTEGER PRIMARY KEY AUTOINCREMENT,
                sale_id INTEGER,
                payment_date DATE,
                payment_method TEXT,
                amount REAL,
                rrn TEXT,
                reference TEXT,
                status TEXT DEFAULT 'Completed',
                notes TEXT,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (sale_id) REFERENCES sales (sale_id) ON DELETE CASCADE
            )
        ''')

        # Demos table: product demonstrations and their conversion status
        conn.execute('''
            CREATE TABLE IF NOT EXISTS demos (
                demo_id INTEGER PRIMARY KEY AUTOINCREMENT,
                customer_id INTEGER,
                distributor_id INTEGER,
                demo_date DATE,
                product_id INTEGER,
                quantity_provided INTEGER,
                follow_up_date DATE,
                conversion_status TEXT DEFAULT 'Not Converted',
                notes TEXT,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL,
                FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
            )
        ''')

        # WhatsApp logs table: outbound message history per customer/distributor
        conn.execute('''
            CREATE TABLE IF NOT EXISTS whatsapp_logs (
                log_id INTEGER PRIMARY KEY AUTOINCREMENT,
                customer_id INTEGER,
                distributor_id INTEGER,
                message_type TEXT,
                message_content TEXT,
                status TEXT,
                sent_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                response TEXT,
                FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL
            )
        ''')

        # Follow-ups table: scheduled follow-up actions, optionally tied to a demo
        conn.execute('''
            CREATE TABLE IF NOT EXISTS follow_ups (
                follow_up_id INTEGER PRIMARY KEY AUTOINCREMENT,
                customer_id INTEGER,
                distributor_id INTEGER,
                demo_id INTEGER,
                follow_up_date DATE,
                follow_up_type TEXT,
                notes TEXT,
                status TEXT DEFAULT 'Pending',
                next_follow_up_date DATE,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL,
                FOREIGN KEY (demo_id) REFERENCES demos (demo_id) ON DELETE SET NULL
            )
        ''')

        # System logs table: generic audit trail of actions
        conn.execute('''
            CREATE TABLE IF NOT EXISTS system_logs (
                log_id INTEGER PRIMARY KEY AUTOINCREMENT,
                log_type TEXT,
                log_message TEXT,
                table_name TEXT,
                record_id INTEGER,
                action TEXT,
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                user_info TEXT
            )
        ''')

        # Rollback logs table: before/after snapshots to support undo
        conn.execute('''
            CREATE TABLE IF NOT EXISTS rollback_logs (
                rollback_id INTEGER PRIMARY KEY AUTOINCREMENT,
                table_name TEXT,
                record_id INTEGER,
                old_data TEXT,
                new_data TEXT,
                action TEXT,
                rollback_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                rolled_back_by TEXT
            )
        ''')

        # Offers table: time-bounded discounts, optionally tied to one product
        conn.execute('''
            CREATE TABLE IF NOT EXISTS offers (
                offer_id INTEGER PRIMARY KEY AUTOINCREMENT,
                offer_name TEXT NOT NULL,
                offer_description TEXT,
                product_id INTEGER,
                discount_percentage REAL,
                discount_amount REAL,
                start_date DATE,
                end_date DATE,
                status TEXT DEFAULT 'Active',
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
            )
        ''')

        # Demo teams table: field teams and their assigned villages
        conn.execute('''
            CREATE TABLE IF NOT EXISTS demo_teams (
                team_id INTEGER PRIMARY KEY AUTOINCREMENT,
                team_name TEXT NOT NULL,
                team_leader TEXT,
                team_members TEXT,
                assigned_villages TEXT,
                status TEXT DEFAULT 'Active',
                created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        conn.commit()
        logger.info("Database tables initialized successfully")

    except sqlite3.Error as e:
        logger.error(f"Error initializing database: {e}")
        raise
    finally:
        conn.close()

    # Only reached when the DDL succeeded (the except re-raises);
    # each of these opens its own connection.
    self.initialize_default_data()
    self.create_indexes()
|
| 254 |
+
|
| 255 |
+
def create_indexes(self):
    """Create indexes on frequently filtered columns to speed up lookups.

    Safe to call repeatedly: every statement uses IF NOT EXISTS.
    """
    index_statements = (
        "CREATE INDEX IF NOT EXISTS idx_customers_village ON customers(village)",
        "CREATE INDEX IF NOT EXISTS idx_customers_mobile ON customers(mobile)",
        "CREATE INDEX IF NOT EXISTS idx_sales_customer_id ON sales(customer_id)",
        "CREATE INDEX IF NOT EXISTS idx_sales_date ON sales(sale_date)",
        "CREATE INDEX IF NOT EXISTS idx_sales_invoice ON sales(invoice_no)",
        "CREATE INDEX IF NOT EXISTS idx_payments_sale_id ON payments(sale_id)",
        "CREATE INDEX IF NOT EXISTS idx_demos_customer_id ON demos(customer_id)",
        "CREATE INDEX IF NOT EXISTS idx_demos_date ON demos(demo_date)",
        "CREATE INDEX IF NOT EXISTS idx_sale_items_sale_id ON sale_items(sale_id)",
        "CREATE INDEX IF NOT EXISTS idx_follow_ups_date ON follow_ups(follow_up_date)",
        "CREATE INDEX IF NOT EXISTS idx_whatsapp_customer_id ON whatsapp_logs(customer_id)",
    )

    connection = self.get_connection()
    try:
        for statement in index_statements:
            connection.execute(statement)
        connection.commit()
        logger.info("Database indexes created successfully")
    except sqlite3.Error as e:
        # Index creation is a performance optimisation; log and continue.
        logger.error(f"Error creating indexes: {e}")
    finally:
        connection.close()
|
| 285 |
+
|
| 286 |
+
def initialize_default_data(self):
    """Seed the products and demo_teams tables with starter rows.

    Idempotent: both inserts use INSERT OR IGNORE, so existing rows are
    left untouched on subsequent calls.
    """
    # (product_name, packing_type, capacity_ltr, category, standard_rate)
    default_products = [
        ('1 LTR PLASTIC JAR', 'PLASTIC_JAR', 1.0, 'Regular', 95),
        ('2 LTR PLASTIC JAR', 'PLASTIC_JAR', 2.0, 'Regular', 185),
        ('5 LTR PLASTIC JAR', 'PLASTIC_JAR', 5.0, 'Regular', 460),
        ('5 LTR STEEL BARNI', 'STEEL_BARNI', 5.0, 'Premium', 680),
        ('10 LTR STEEL BARNI', 'STEEL_BARNI', 10.0, 'Premium', 1300),
        ('20 LTR STEEL BARNI', 'STEEL_BARNI', 20.0, 'Premium', 2950),
        ('20 LTR PLASTIC CAN', 'PLASTIC_CAN', 20.0, 'Regular', 2400),
        ('1 LTR PET BOTTLE', 'PET_BOTTLE', 1.0, 'Regular', 85),
    ]

    # (team_name, team_leader, team_members, assigned_villages)
    default_teams = [
        ('Team A - North Region', 'Rajesh Kumar', 'Mohan, Suresh, Priya', 'Amiyad, Amvad, Ankalav'),
        ('Team B - South Region', 'Sunil Patel', 'Anita, Vijay, Deepak', 'Petlad, Borsad, Vadodara'),
    ]

    connection = self.get_connection()
    try:
        connection.executemany(
            '''
            INSERT OR IGNORE INTO products (product_name, packing_type, capacity_ltr, category, standard_rate)
            VALUES (?, ?, ?, ?, ?)
            ''',
            default_products,
        )
        connection.executemany(
            '''
            INSERT OR IGNORE INTO demo_teams (team_name, team_leader, team_members, assigned_villages)
            VALUES (?, ?, ?, ?)
            ''',
            default_teams,
        )
        connection.commit()
        logger.info("Default data initialized successfully")
    except sqlite3.Error as e:
        logger.error(f"Error initializing default data: {e}")
    finally:
        connection.close()
|
| 327 |
+
|
| 328 |
+
def _execute_query_internal(self, query: str, params: tuple = None) -> List[tuple]:
    """Execute a SQL statement on a fresh connection, without audit logging.

    Args:
        query: SQL text to execute.
        params: optional bind parameters.

    Returns:
        Fetched rows for statements that produce a result set, else [].

    Raises:
        sqlite3.Error: re-raised after rollback so callers can decide
        how to handle the failure.
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()
        if params is not None:
            cursor.execute(query, params)
        else:
            cursor.execute(query)

        # cursor.description is set whenever the statement produced a result
        # set (SELECT, PRAGMA, WITH ... SELECT, INSERT ... RETURNING).  The
        # previous startswith('SELECT') test silently dropped rows from any
        # other row-returning statement and from queries with leading
        # comments/whitespace.
        if cursor.description is not None:
            result = cursor.fetchall()
        else:
            result = []

        conn.commit()
        return result

    except sqlite3.Error as e:
        logger.error(f"Database query error: {e}")
        conn.rollback()
        raise
    finally:
        conn.close()
|
| 353 |
+
|
| 354 |
+
def execute_query(self, query: str, params: tuple = None, log_action: bool = True) -> List[tuple]:
    """Run a query and optionally record it in system_logs.

    Never raises: on any failure the error is logged and [] is returned,
    which callers treat as "no data".
    """
    try:
        rows = self._execute_query_internal(query, params)
    except Exception as e:
        logger.error(f"Error in execute_query: {e}")
        return []  # Return empty list instead of raising exception

    # Audit-log the execution; the _is_logging flag prevents the log insert
    # itself from being logged recursively.
    if log_action and not self._is_logging:
        self._is_logging = True
        try:
            self._execute_query_internal('''
                INSERT INTO system_logs (log_type, log_message, table_name, record_id, action)
                VALUES (?, ?, ?, ?, ?)
            ''', ('QUERY_EXECUTION', f"Executed query: {query[:100]}...", None, None, 'EXECUTE'))
        except Exception as e:
            logger.error(f"Error logging system action: {e}")
        finally:
            self._is_logging = False

    return rows
|
| 376 |
+
|
| 377 |
+
def get_dataframe(self, table_name: str = None, query: str = None, params: tuple = None) -> pd.DataFrame:
    """Return query results (or a whole table) as a pandas DataFrame.

    When `query` is given it takes precedence and `params` are bound to it;
    otherwise the entire `table_name` is selected.  Any failure is logged
    and yields an empty DataFrame instead of raising.
    """
    conn = self.get_connection()
    try:
        if query:
            return pd.read_sql_query(query, conn, params=params)
        return pd.read_sql_query(f"SELECT * FROM {table_name}", conn)
    except Exception as e:
        logger.error(f"Error getting DataFrame for {table_name if table_name else 'query'}: {e}")
        # Return empty DataFrame with proper structure
        return pd.DataFrame()
    finally:
        conn.close()
|
| 392 |
+
|
| 393 |
+
def add_customer(self, name: str, mobile: str = "", village: str = "", taluka: str = "",
                 district: str = "", customer_code: str = None) -> int:
    """Add a new customer, returning the existing id when a duplicate is found.

    Args:
        name: customer name (required).
        mobile/village/taluka/district: optional location/contact fields.
        customer_code: unique code; auto-generated when omitted.

    Returns:
        The customer_id of the new or pre-existing record, or -1 on error.
    """
    if not customer_code:
        customer_code = f"CUST{datetime.now().strftime('%Y%m%d%H%M%S')}{random.randint(100, 999)}"

    try:
        # Duplicate detection.  Only match on mobile when one is supplied:
        # the old unconditional `mobile = ?` clause meant every customer
        # with a blank mobile matched every other blank-mobile customer,
        # collapsing distinct people into one record.
        if mobile:
            existing_customer = self.execute_query(
                'SELECT customer_id FROM customers WHERE mobile = ? OR (name = ? AND village = ?)',
                (mobile, name, village),
                log_action=False
            )
        else:
            existing_customer = self.execute_query(
                'SELECT customer_id FROM customers WHERE name = ? AND village = ?',
                (name, village),
                log_action=False
            )
        if existing_customer:
            return existing_customer[0][0]

        # Insert on a single connection and read cursor.lastrowid directly.
        # The previous code ran `SELECT last_insert_rowid()` through
        # execute_query, which opens a *new* connection and therefore always
        # returned 0 instead of the inserted id.
        conn = self.get_connection()
        try:
            customer_id = None
            max_attempts = 5
            for attempt in range(max_attempts):
                try:
                    cursor = conn.execute('''
                        INSERT INTO customers (customer_code, name, mobile, village, taluka, district)
                        VALUES (?, ?, ?, ?, ?, ?)
                    ''', (customer_code, name, mobile, village, taluka, district))
                    customer_id = cursor.lastrowid
                    conn.commit()
                    break
                except sqlite3.IntegrityError as e:
                    if "UNIQUE constraint failed: customers.customer_code" in str(e) and attempt < max_attempts - 1:
                        # Collision on the generated code: try a fresh one.
                        customer_code = f"CUST{datetime.now().strftime('%Y%m%d%H%M%S')}{random.randint(1000, 9999)}"
                        continue
                    raise
        finally:
            conn.close()

        self.log_system_action('CUSTOMER_ADD', f"Added customer: {name}", 'customers', customer_id, 'INSERT')
        return customer_id
    except Exception as e:
        logger.error(f"Error adding customer: {e}")
        # Fallback sentinel so callers don't crash; -1 is never a valid id.
        return -1
|
| 440 |
+
def add_distributor(self, name: str, village: str = "", taluka: str = "", district: str = "",
                    mantri_name: str = "", mantri_mobile: str = "", sabhasad_count: int = 0,
                    contact_in_group: int = 0, status: str = "Active") -> int:
    """Add a distributor, returning the existing id when one is already on file.

    A distributor is considered a duplicate when name, village and taluka
    all match.  Returns -1 on any error.
    """
    try:
        existing_distributor = self.execute_query(
            'SELECT distributor_id FROM distributors WHERE name = ? AND village = ? AND taluka = ?',
            (name, village, taluka),
            log_action=False
        )
        if existing_distributor:
            return existing_distributor[0][0]

        # Insert and read cursor.lastrowid on the same connection.  The old
        # `SELECT last_insert_rowid()` went through execute_query, which
        # opens a *new* connection and therefore always returned 0.
        conn = self.get_connection()
        try:
            cursor = conn.execute('''
                INSERT INTO distributors (name, village, taluka, district, mantri_name, mantri_mobile,
                                        sabhasad_count, contact_in_group, status)
                VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            ''', (name, village, taluka, district, mantri_name, mantri_mobile,
                  sabhasad_count, contact_in_group, status))
            distributor_id = cursor.lastrowid
            conn.commit()
        finally:
            conn.close()

        self.log_system_action('DISTRIBUTOR_ADD', f"Added distributor: {name}", 'distributors', distributor_id, 'INSERT')
        return distributor_id

    except Exception as e:
        logger.error(f"Error adding distributor: {e}")
        return -1
|
| 475 |
+
def get_distributor_by_location(self, village: str, taluka: str) -> Optional[Dict]:
    """Look up the distributor record for a village/taluka pair.

    Returns the first matching row as a dict, or None when there is no
    match or an error occurs.

    NOTE(review): dict(row) only works when get_connection() sets a
    mapping-style row_factory (e.g. sqlite3.Row) — confirm; with plain
    tuple rows the conversion raises and this silently returns None.
    """
    try:
        rows = self.execute_query(
            'SELECT * FROM distributors WHERE village = ? AND taluka = ?',
            (village, taluka),
            log_action=False
        )
        return dict(rows[0]) if rows else None
    except Exception as e:
        logger.error(f"Error getting distributor by location: {e}")
        return None
|
| 489 |
+
|
| 490 |
+
def distributor_exists(self, name: str, village: str, taluka: str) -> bool:
    """Return True when a distributor with this name/village/taluka is on file."""
    try:
        matches = self.execute_query(
            'SELECT distributor_id FROM distributors WHERE name = ? AND village = ? AND taluka = ?',
            (name, village, taluka),
            log_action=False
        )
        return bool(matches)
    except Exception as e:
        logger.error(f"Error checking distributor existence: {e}")
        # Treat lookup failures as "not found" so callers can proceed.
        return False
|
| 502 |
+
def generate_invoice_number(self, prefix: str = "INVCL") -> str:
    """Generate the next invoice number as PREFIX + mmyy + 3-digit serial.

    The serial restarts at 001 each month.  The class previously defined
    this method TWICE (a fixed-prefix version and a configurable one); the
    second definition silently shadowed the first, so they are merged here.
    The default prefix preserves the original "INVCLmmyyNNN" format, so
    existing callers are unaffected.

    Args:
        prefix: invoice prefix; default "INVCL".

    Returns:
        e.g. "INVCL0625042"; on error, a timestamp-based fallback number.
    """
    try:
        now = datetime.now()
        month = now.strftime('%m')  # two-digit month
        year = now.strftime('%y')   # two-digit year

        # Most recently inserted invoice for this month/year window.
        result = self.execute_query(
            "SELECT invoice_no FROM sales WHERE invoice_no LIKE ? ORDER BY sale_id DESC LIMIT 1",
            (f"{prefix}{month}{year}%",),
            log_action=False
        )

        new_serial = 1
        if result:
            last_invoice = result[0][0]
            try:
                # Strip prefix + mmyy (4 chars) to recover the serial part.
                serial_part = last_invoice[len(prefix) + 4:]
                new_serial = int(serial_part) + 1
            except ValueError:
                # Unparseable legacy number: restart the serial.
                new_serial = 1

        return f"{prefix}{month}{year}{new_serial:03d}"

    except Exception as e:
        logger.error(f"Error generating invoice number: {e}")
        # Fallback: timestamp-based, still unique enough for recovery.
        return f"{prefix}{int(datetime.now().timestamp())}"
|
| 572 |
+
|
| 573 |
+
# Add to your DatabaseManager class in database.py
|
| 574 |
+
|
| 575 |
+
def add_sale(self, invoice_no: str, customer_id: int, sale_date, items: List[Dict],
             payments: List[Dict] = None, notes: str = "") -> int:
    """Create a sale with its line items and optional payments atomically.

    Args:
        invoice_no: invoice identifier for the sale.
        customer_id: id of the buying customer.
        sale_date: sale date (passed through to SQLite as-is).
        items: dicts with 'product_id', 'quantity', 'rate' and optional 'liters'.
        payments: optional dicts with 'payment_date', 'method', 'amount' and
            optional 'rrn' / 'reference'.
        notes: free-text note stored on the sale.

    Returns:
        The new sale_id.

    Raises:
        Re-raises any error after rolling back the transaction.
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()

        total_amount = sum(item['quantity'] * item['rate'] for item in items)
        total_liters = sum(item.get('liters', 0) for item in items)

        # Diagnostics previously went through bare print() calls left over
        # from debugging; route them through the module logger instead so
        # they can be silenced or redirected in production.
        logger.debug("Creating sale - Invoice: %s, Customer: %s, Total: %s",
                     invoice_no, customer_id, total_amount)

        cursor.execute('''
            INSERT INTO sales (invoice_no, customer_id, sale_date, total_amount, total_liters, notes)
            VALUES (?, ?, ?, ?, ?, ?)
        ''', (invoice_no, customer_id, sale_date, total_amount, total_liters, notes))

        sale_id = cursor.lastrowid
        logger.debug("Sale created with ID: %s", sale_id)

        for item in items:
            amount = item['quantity'] * item['rate']
            cursor.execute('''
                INSERT INTO sale_items (sale_id, product_id, quantity, rate, amount)
                VALUES (?, ?, ?, ?, ?)
            ''', (sale_id, item['product_id'], item['quantity'], item['rate'], amount))

        if payments:
            for payment in payments:
                cursor.execute('''
                    INSERT INTO payments (sale_id, payment_date, payment_method, amount, rrn, reference)
                    VALUES (?, ?, ?, ?, ?, ?)
                ''', (sale_id, payment['payment_date'], payment['method'],
                      payment['amount'], payment.get('rrn', ''), payment.get('reference', '')))

        conn.commit()

        # Recompute Paid/Partial/Pending from the recorded payments.
        self._update_payment_status(sale_id)

        return sale_id

    except Exception as e:
        conn.rollback()
        logger.error(f"Error adding sale: {e}")
        raise
    finally:
        conn.close()
|
| 632 |
+
|
| 633 |
+
def _update_payment_status(self, sale_id: int):
    """Recompute and store a sale's payment_status (Paid/Partial/Pending).

    Compares the sum of recorded payments against the sale total.  Unknown
    sale ids are logged and skipped (previously `fetchone()[0]` raised a
    TypeError that was only surfaced via the generic error log).
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()

        cursor.execute('SELECT COALESCE(SUM(amount), 0) FROM payments WHERE sale_id = ?', (sale_id,))
        total_paid = cursor.fetchone()[0]

        cursor.execute('SELECT total_amount FROM sales WHERE sale_id = ?', (sale_id,))
        sale_row = cursor.fetchone()
        if sale_row is None or sale_row[0] is None:
            logger.warning(f"Cannot update payment status: sale {sale_id} not found")
            return
        sale_total = sale_row[0]

        if total_paid >= sale_total:
            status = 'Paid'
        elif total_paid > 0:
            status = 'Partial'
        else:
            status = 'Pending'

        cursor.execute('UPDATE sales SET payment_status = ? WHERE sale_id = ?', (status, sale_id))
        conn.commit()

    except Exception as e:
        logger.error(f"Error updating payment status: {e}")
    finally:
        conn.close()
|
| 662 |
+
|
| 663 |
+
def get_pending_payments(self) -> pd.DataFrame:
    """Sales with an outstanding balance, newest first, with customer info."""
    pending_sql = '''
        SELECT s.sale_id, s.invoice_no, s.sale_date, c.name as customer_name,
               c.mobile, c.village, s.total_amount,
               (s.total_amount - COALESCE(SUM(p.amount), 0)) as pending_amount,
               COALESCE(SUM(p.amount), 0) as paid_amount
        FROM sales s
        LEFT JOIN customers c ON s.customer_id = c.customer_id
        LEFT JOIN payments p ON s.sale_id = p.sale_id
        WHERE s.payment_status IN ('Pending', 'Partial')
        GROUP BY s.sale_id
        HAVING pending_amount > 0
        ORDER BY s.sale_date DESC
    '''
    return self.get_dataframe('sales', pending_sql)
|
| 678 |
+
|
| 679 |
+
def get_demo_conversions(self) -> pd.DataFrame:
    """All demos with customer/product/distributor context and a 0/1 converted flag."""
    conversions_sql = '''
        SELECT d.*, c.name as customer_name, p.product_name,
               dist.name as distributor_name, c.village, c.taluka,
               CASE WHEN d.conversion_status = 'Converted' THEN 1 ELSE 0 END as converted
        FROM demos d
        LEFT JOIN customers c ON d.customer_id = c.customer_id
        LEFT JOIN products p ON d.product_id = p.product_id
        LEFT JOIN distributors dist ON d.distributor_id = dist.distributor_id
        ORDER BY d.demo_date DESC
    '''
    return self.get_dataframe('demos', conversions_sql)
|
| 691 |
+
|
| 692 |
+
def get_sales_analytics(self, start_date: str = None, end_date: str = None) -> Dict:
    """Summarise sales over a date range (defaults to the last 30 days).

    Returns a dict of totals/averages; {} when the query yields nothing.
    """
    # Fill in defaults for any missing bound of the range.
    start_date = start_date or (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')
    end_date = end_date or datetime.now().strftime('%Y-%m-%d')

    summary_sql = '''
        SELECT
            COUNT(*) as total_sales,
            SUM(total_amount) as total_revenue,
            AVG(total_amount) as avg_sale_value,
            COUNT(DISTINCT customer_id) as unique_customers,
            SUM(CASE WHEN payment_status = 'Paid' THEN 1 ELSE 0 END) as completed_payments,
            SUM(CASE WHEN payment_status IN ('Pending', 'Partial') THEN 1 ELSE 0 END) as pending_payments
        FROM sales
        WHERE sale_date BETWEEN ? AND ?
    '''

    rows = self.execute_query(summary_sql, (start_date, end_date), log_action=False)
    if not rows:
        return {}

    field_names = ('total_sales', 'total_revenue', 'avg_sale_value',
                   'unique_customers', 'completed_payments', 'pending_payments')
    # NULL aggregates (empty range) are normalised to 0.
    return {name: (value or 0) for name, value in zip(field_names, rows[0])}
|
| 724 |
+
|
| 725 |
+
def log_system_action(self, log_type: str, message: str, table_name: str = None,
                      record_id: int = None, action: str = None):
    """Record an audit-trail entry in system_logs.

    No-op while another log write is in flight: the insert below goes
    through the query layer, which could otherwise log itself recursively.
    """
    if self._is_logging:
        return

    self._is_logging = True
    try:
        self._execute_query_internal('''
            INSERT INTO system_logs (log_type, log_message, table_name, record_id, action)
            VALUES (?, ?, ?, ?, ?)
        ''', (log_type, message, table_name, record_id, action))
    except Exception as e:
        logger.error(f"Error logging system action: {e}")
    finally:
        self._is_logging = False
|
| 741 |
+
|
| 742 |
+
def create_rollback_point(self, table_name: str, record_id: int, old_data: str,
                          new_data: str, action: str):
    """Snapshot a data change into rollback_logs so it can be undone later.

    Failures are logged and swallowed: a missing rollback point must not
    abort the data change it was recording.
    """
    rollback_sql = '''
        INSERT INTO rollback_logs (table_name, record_id, old_data, new_data, action)
        VALUES (?, ?, ?, ?, ?)
    '''
    try:
        self.execute_query(rollback_sql,
                           (table_name, record_id, old_data, new_data, action),
                           log_action=False)
    except Exception as e:
        logger.error(f"Error creating rollback point: {e}")
|
| 752 |
+
|
| 753 |
+
def get_recent_activity(self, limit: int = 10) -> pd.DataFrame:
    """Most recent system_logs entries, newest first.

    Args:
        limit: maximum rows to return (coerced to int and bound as a SQL
            parameter — the previous f-string interpolation would splice
            arbitrary text straight into the statement).

    Returns:
        DataFrame of log rows; empty on error.
    """
    return self.get_dataframe('system_logs', '''
        SELECT log_type, log_message, table_name, record_id, action, created_date
        FROM system_logs
        ORDER BY created_date DESC
        LIMIT ?
    ''', (int(limit),))
|
| 761 |
+
|
| 762 |
+
def backup_database(self, backup_path: str = None):
    """Create a full copy of the database via SQLite's online backup API.

    Args:
        backup_path: destination file; defaults to a timestamped name.

    Returns:
        The backup path on success, None on failure.
    """
    if not backup_path:
        backup_path = f"backup_{datetime.now().strftime('%Y%m%d_%H%M%S')}.db"

    try:
        conn = self.get_connection()
        try:
            backup_conn = sqlite3.connect(backup_path)
            try:
                # `with` wraps the copy in a transaction on the destination.
                with backup_conn:
                    conn.backup(backup_conn)
            finally:
                # Previously both closes sat on the success path only, so a
                # failed backup leaked both connections.
                backup_conn.close()
        finally:
            conn.close()

        logger.info(f"Database backup created: {backup_path}")
        return backup_path

    except Exception as e:
        logger.error(f"Error creating database backup: {e}")
        return None
|
| 783 |
+
|
| 784 |
+
def get_village_wise_sales(self) -> pd.DataFrame:
    """Per-village sales totals, ordered by revenue (villages must be non-empty)."""
    village_sql = '''
        SELECT c.village, COUNT(s.sale_id) as total_sales,
               SUM(s.total_amount) as total_revenue,
               AVG(s.total_amount) as avg_sale_value,
               COUNT(DISTINCT s.customer_id) as unique_customers
        FROM sales s
        JOIN customers c ON s.customer_id = c.customer_id
        WHERE c.village IS NOT NULL AND c.village != ''
        GROUP BY c.village
        ORDER BY total_revenue DESC
    '''
    return self.get_dataframe('sales', village_sql)
|
| 797 |
+
|
| 798 |
+
def get_product_performance(self) -> pd.DataFrame:
    """Per-product sales volume, revenue and average rate, best sellers first."""
    performance_sql = '''
        SELECT p.product_name, COUNT(si.item_id) as times_sold,
               SUM(si.quantity) as total_quantity,
               SUM(si.amount) as total_revenue,
               AVG(si.rate) as avg_rate
        FROM sale_items si
        JOIN products p ON si.product_id = p.product_id
        GROUP BY p.product_id, p.product_name
        ORDER BY total_revenue DESC
    '''
    return self.get_dataframe('sale_items', performance_sql)
|
| 810 |
+
|
| 811 |
+
def get_upcoming_follow_ups(self) -> pd.DataFrame:
    """Next 20 pending follow-ups from today onwards, soonest first."""
    follow_up_sql = '''
        SELECT f.*, c.name as customer_name, c.mobile,
               d.name as distributor_name, dm.demo_date
        FROM follow_ups f
        LEFT JOIN customers c ON f.customer_id = c.customer_id
        LEFT JOIN distributors d ON f.distributor_id = d.distributor_id
        LEFT JOIN demos dm ON f.demo_id = dm.demo_id
        WHERE f.follow_up_date >= date('now')
        AND f.status = 'Pending'
        ORDER BY f.follow_up_date ASC
        LIMIT 20
    '''
    return self.get_dataframe('follow_ups', follow_up_sql)
|
| 825 |
+
|
| 826 |
+
def get_whatsapp_logs(self, customer_id: int = None) -> pd.DataFrame:
    """WhatsApp message history, newest first.

    With a customer_id: all messages for that customer.  Without one: the
    latest 50 messages overall.
    """
    base_sql = '''
        SELECT w.*, c.name as customer_name, c.mobile
        FROM whatsapp_logs w
        LEFT JOIN customers c ON w.customer_id = c.customer_id
    '''
    if customer_id:
        per_customer_sql = base_sql + '''
        WHERE w.customer_id = ?
        ORDER BY w.sent_date DESC
        '''
        return self.get_dataframe('whatsapp_logs', per_customer_sql, (customer_id,))

    recent_sql = base_sql + '''
    ORDER BY w.sent_date DESC
    LIMIT 50
    '''
    return self.get_dataframe('whatsapp_logs', recent_sql)
|
| 844 |
+
|
| 845 |
+
def cleanup_old_data(self, days: int = 365):
    """Delete audit/rollback log rows older than `days` days."""
    try:
        cutoff_date = (datetime.now() - timedelta(days=days)).strftime('%Y-%m-%d')

        # (table, date column) pairs to purge; trusted literals, not user input.
        purge_targets = (
            ('system_logs', 'created_date'),
            ('rollback_logs', 'rollback_date'),
        )
        for table, date_column in purge_targets:
            self.execute_query(f'DELETE FROM {table} WHERE {date_column} < ?',
                               (cutoff_date,), log_action=False)

        logger.info(f"Cleaned up data older than {days} days")

    except Exception as e:
        logger.error(f"Error cleaning up old data: {e}")
|
| 860 |
+
|
| 861 |
+
# Utility function to check database health
def check_database_health(db_path: str = "sales_management.db") -> Dict:
    """Report basic health statistics for the sales database.

    Args:
        db_path: path to the SQLite file.

    Returns:
        Dict with status, per-table row counts, file size in MB and the
        result of SQLite's integrity check ('passed'/'failed').  The
        integrity result was previously hard-coded to 'passed'; it now
        runs PRAGMA integrity_check for real.
    """
    try:
        db = DatabaseManager(db_path)

        tables = ['customers', 'sales', 'distributors', 'demos', 'payments', 'products']
        counts = {}
        for table in tables:
            result = db.execute_query(f"SELECT COUNT(*) FROM {table}", log_action=False)
            counts[table] = result[0][0] if result else 0

        db_size = os.path.getsize(db_path) if os.path.exists(db_path) else 0

        # Run the built-in integrity check on a direct connection (the
        # manager's query helper historically dropped non-SELECT result sets).
        check_conn = sqlite3.connect(db_path)
        try:
            integrity_row = check_conn.execute("PRAGMA integrity_check").fetchone()
        finally:
            check_conn.close()
        integrity_ok = integrity_row is not None and integrity_row[0] == 'ok'

        return {
            'status': 'healthy',
            'table_counts': counts,
            'database_size_mb': round(db_size / (1024 * 1024), 2),
            'last_backup': 'N/A',  # TODO: track backups via backup_database()
            'integrity_check': 'passed' if integrity_ok else 'failed'
        }

    except Exception as e:
        return {
            'status': 'error',
            'error': str(e),
            'table_counts': {},
            'database_size_mb': 0,
            'integrity_check': 'failed'
        }
|
main.py
ADDED
|
@@ -0,0 +1,211 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# main.py (Fixed version)
|
| 2 |
+
import streamlit as st
|
| 3 |
+
import os
|
| 4 |
+
import sys
|
| 5 |
+
|
| 6 |
+
# Add the current directory to Python path
|
| 7 |
+
sys.path.append(os.path.dirname(__file__))
|
| 8 |
+
|
| 9 |
+
# MUST BE FIRST - Page configuration
|
| 10 |
+
st.set_page_config(
|
| 11 |
+
page_title="Sales Management System",
|
| 12 |
+
page_icon="π",
|
| 13 |
+
layout="wide",
|
| 14 |
+
initial_sidebar_state="expanded"
|
| 15 |
+
)
|
| 16 |
+
|
| 17 |
+
# Import utilities - fall back to minimal local implementations when the
# utils/components packages are not installed alongside this file.
try:
    from utils.styling import apply_custom_css
    from utils.helpers import init_session_state, check_module_availability
    from components.database_status import show_database_status
except ImportError:
    def apply_custom_css():
        """Fallback: inject the minimal CSS the page headers rely on."""
        st.markdown("""
        <style>
        .main-header { color: #1f77b4; }
        .section-header { color: #2e86ab; margin-top: 2rem; }
        </style>
        """, unsafe_allow_html=True)

    def init_session_state():
        """Fallback: ensure every expected session-state key exists (as None)."""
        for key in ('db', 'data_processor', 'analytics', 'whatsapp_manager'):
            if key not in st.session_state:
                st.session_state[key] = None

    def check_module_availability():
        """Fallback: probe optional dependencies.

        Returns:
            (modules_available, whatsapp_available) booleans.
        """
        try:
            import pandas, plotly, sqlite3
            modules_available = True
        except ImportError:
            modules_available = False

        try:
            import pywhatkit
            whatsapp_available = True
        except ImportError:
            whatsapp_available = False

        return modules_available, whatsapp_available

    def show_database_status(db):
        """Fallback: sidebar indicator for database connectivity."""
        if db:
            try:
                # Cheap connectivity probe; the row count itself is unused.
                db.execute_query("SELECT COUNT(*) FROM sqlite_master", log_action=False)
                st.sidebar.success("✅ Database Connected")
            except Exception:
                # Was a bare `except:`, which would also swallow
                # KeyboardInterrupt/SystemExit.
                st.sidebar.error("❌ Database Error")
        else:
            st.sidebar.warning("⚠️ Database Not Initialized")
|
| 67 |
+
# Apply custom CSS
|
| 68 |
+
apply_custom_css()
|
| 69 |
+
|
| 70 |
+
# Initialize session state
|
| 71 |
+
init_session_state()
|
| 72 |
+
|
| 73 |
+
# Check module availability
|
| 74 |
+
MODULES_AVAILABLE, WHATSAPP_AVAILABLE = check_module_availability()
|
| 75 |
+
|
| 76 |
+
# Initialize the app-wide components (DB, processor, analytics, WhatsApp),
# caching each in session_state so reruns reuse the same instances.
if MODULES_AVAILABLE:
    try:
        from database import DatabaseManager
        from data_processor import DataProcessor
        from analytics import Analytics

        if st.session_state.db is None:
            st.session_state.db = DatabaseManager()
            st.success("✅ Database initialized successfully!")

        # Processor and analytics both wrap the shared DB handle.
        if st.session_state.data_processor is None:
            st.session_state.data_processor = DataProcessor(st.session_state.db)

        if st.session_state.analytics is None:
            st.session_state.analytics = Analytics(st.session_state.db)

        # WhatsApp support is optional; failure here degrades gracefully
        # instead of aborting the whole app.
        if WHATSAPP_AVAILABLE and st.session_state.whatsapp_manager is None:
            try:
                from whatsapp_manager import WhatsAppManager
                st.session_state.whatsapp_manager = WhatsAppManager(st.session_state.db)
                st.success("✅ WhatsApp Manager initialized!")
            except Exception as e:
                st.warning(f"⚠️ WhatsApp Manager not available: {e}")
                st.session_state.whatsapp_manager = None

    except Exception as e:
        st.error(f"❌ Application initialization failed: {e}")
        st.info("Please check that all required files are in the correct location.")
# Bind the session-state singletons to short local names for the page handlers.
db = st.session_state.db
data_processor = st.session_state.data_processor
analytics = st.session_state.analytics
whatsapp_manager = st.session_state.whatsapp_manager

# Sidebar navigation header and page selector.
# NOTE(review): the "π…" prefixes look like mojibake of emoji labels — the
# routing below compares against these exact strings, so any repair must be
# applied to both places at once.
st.sidebar.markdown("<h1 style='text-align: center;'>π Sales Management</h1>", unsafe_allow_html=True)

page = st.sidebar.radio("Navigation", [
    "π Dashboard", "π₯ Customers", "π° Sales", "π³ Payments",
    "π― Demos", "π€ Distributors", "π File Viewer", "π€ Data Import", "π Reports"
], index=0)
# Page routing: each page module is imported lazily and guarded with its own
# try/except ImportError so one missing page does not break the others.
# NOTE(review): show_basic_dashboard is defined further down this script; if
# pages.dashboard fails to import, the fallback call raises NameError which is
# swallowed by the outer handler below — confirm intended ordering.
try:
    if page == "π Dashboard":
        try:
            from pages.dashboard import create_dashboard
            create_dashboard(db, analytics)
        except ImportError:
            st.error("Dashboard page not available. Creating basic dashboard...")
            show_basic_dashboard(db, analytics)

    elif page == "π₯ Customers":
        try:
            from pages.customers import show_customers_page
            show_customers_page(db, whatsapp_manager)
        except ImportError:
            st.error("Customers page not available")

    elif page == "π° Sales":
        try:
            from pages.sales import show_sales_page
            show_sales_page(db, whatsapp_manager)
        except ImportError:
            st.error("Sales page not available")

    elif page == "π― Demos":
        # Guarded like the sibling pages; previously a missing pages.demos
        # fell straight through to the generic "Application error" handler.
        try:
            from pages.demos import show_demos_page
            show_demos_page(db, whatsapp_manager)
        except ImportError:
            st.error("Demos page not available")

    elif page == "π³ Payments":
        # Guarded like the sibling pages (was previously unguarded).
        try:
            from pages.payments import show_payments_page
            show_payments_page(db, whatsapp_manager)
        except ImportError:
            st.error("Payments page not available")

    elif page == "π€ Distributors":
        try:
            from pages.distributors import show_distributors_page
            show_distributors_page(db, whatsapp_manager)
        except ImportError:
            st.error("Distributors page not available")

    elif page == "π File Viewer":
        try:
            from pages.file_viewer import show_file_viewer_page
            show_file_viewer_page(db, data_processor)
        except ImportError:
            st.error("File Viewer page not available")

    elif page == "π€ Data Import":
        try:
            from pages.data_import import show_data_import_page
            show_data_import_page(db, data_processor)
        except ImportError:
            st.error("Data Import page not available")

    elif page == "π Reports":
        try:
            from pages.reports import show_reports_page
            show_reports_page(db, whatsapp_manager)
        except ImportError:
            st.error("Reports page not available")

except Exception as e:
    # Last-resort guard so a page crash degrades to an error banner.
    st.error(f"Application error: {e}")
    st.info("Please check the console for more details.")
# Sidebar footer: database connection indicator plus app version banner.
show_database_status(db)

st.sidebar.markdown("---")
st.sidebar.info("π Sales Management System v2.0")
# Basic dashboard fallback, used when pages.dashboard cannot be imported.
def show_basic_dashboard(db, analytics):
    """Render a minimal four-metric sales dashboard.

    Args:
        db: Database wrapper (only checked for truthiness here).
        analytics: Object exposing get_sales_summary() -> dict with keys
            total_sales, pending_amount, total_transactions, avg_sale_value
            (all optional; missing keys default to 0).
    """
    st.title("π Sales Dashboard")

    if db and analytics:
        try:
            sales_summary = analytics.get_sales_summary()

            # Amounts are rupee values; "βΉ" in the original was mojibake for ₹.
            col1, col2, col3, col4 = st.columns(4)
            with col1:
                st.metric("Total Sales", f"₹{sales_summary.get('total_sales', 0):,.0f}")
            with col2:
                st.metric("Pending Payments", f"₹{sales_summary.get('pending_amount', 0):,.0f}")
            with col3:
                st.metric("Total Transactions", sales_summary.get('total_transactions', 0))
            with col4:
                st.metric("Avg Sale", f"₹{sales_summary.get('avg_sale_value', 0):,.0f}")

        except Exception as e:
            # UI boundary: surface the failure rather than crashing the page.
            st.error(f"Error loading dashboard data: {e}")
    else:
        st.warning("Database or analytics not available")
pages/__init__.py
ADDED
|
File without changes
|
pages/__pycache__/__init__.cpython-310.pyc
ADDED
|
Binary file (138 Bytes). View file
|
|
|
pages/__pycache__/customers.cpython-310.pyc
ADDED
|
Binary file (13.4 kB). View file
|
|
|
pages/__pycache__/dashboard.cpython-310.pyc
ADDED
|
Binary file (4.07 kB). View file
|
|
|
pages/__pycache__/data_import.cpython-310.pyc
ADDED
|
Binary file (3.28 kB). View file
|
|
|
pages/__pycache__/demos.cpython-310.pyc
ADDED
|
Binary file (16.8 kB). View file
|
|
|
pages/__pycache__/distributors.cpython-310.pyc
ADDED
|
Binary file (27.6 kB). View file
|
|
|
pages/__pycache__/file_viewer.cpython-310.pyc
ADDED
|
Binary file (11.9 kB). View file
|
|
|
pages/__pycache__/payments.cpython-310.pyc
ADDED
|
Binary file (16.1 kB). View file
|
|
|
pages/__pycache__/reports.cpython-310.pyc
ADDED
|
Binary file (31 kB). View file
|
|
|
pages/__pycache__/sales.cpython-310.pyc
ADDED
|
Binary file (14.1 kB). View file
|
|
|