Upload 12 files
Browse files- .gitattributes +2 -35
- .gitignore +0 -0
- analytics.py +192 -0
- automation.py +90 -0
- data_processor.py +710 -0
- database.py +1106 -0
- main.py +230 -0
- requirements.txt +8 -0
- sales_data_processor.py +502 -0
- sales_management.db +3 -0
- sales_manager.py +92 -0
- whatsapp_manager.py +402 -0
.gitattributes
CHANGED
|
@@ -1,35 +1,2 @@
|
|
| 1 |
-
*.
|
| 2 |
-
|
| 3 |
-
*.bin filter=lfs diff=lfs merge=lfs -text
|
| 4 |
-
*.bz2 filter=lfs diff=lfs merge=lfs -text
|
| 5 |
-
*.ckpt filter=lfs diff=lfs merge=lfs -text
|
| 6 |
-
*.ftz filter=lfs diff=lfs merge=lfs -text
|
| 7 |
-
*.gz filter=lfs diff=lfs merge=lfs -text
|
| 8 |
-
*.h5 filter=lfs diff=lfs merge=lfs -text
|
| 9 |
-
*.joblib filter=lfs diff=lfs merge=lfs -text
|
| 10 |
-
*.lfs.* filter=lfs diff=lfs merge=lfs -text
|
| 11 |
-
*.mlmodel filter=lfs diff=lfs merge=lfs -text
|
| 12 |
-
*.model filter=lfs diff=lfs merge=lfs -text
|
| 13 |
-
*.msgpack filter=lfs diff=lfs merge=lfs -text
|
| 14 |
-
*.npy filter=lfs diff=lfs merge=lfs -text
|
| 15 |
-
*.npz filter=lfs diff=lfs merge=lfs -text
|
| 16 |
-
*.onnx filter=lfs diff=lfs merge=lfs -text
|
| 17 |
-
*.ot filter=lfs diff=lfs merge=lfs -text
|
| 18 |
-
*.parquet filter=lfs diff=lfs merge=lfs -text
|
| 19 |
-
*.pb filter=lfs diff=lfs merge=lfs -text
|
| 20 |
-
*.pickle filter=lfs diff=lfs merge=lfs -text
|
| 21 |
-
*.pkl filter=lfs diff=lfs merge=lfs -text
|
| 22 |
-
*.pt filter=lfs diff=lfs merge=lfs -text
|
| 23 |
-
*.pth filter=lfs diff=lfs merge=lfs -text
|
| 24 |
-
*.rar filter=lfs diff=lfs merge=lfs -text
|
| 25 |
-
*.safetensors filter=lfs diff=lfs merge=lfs -text
|
| 26 |
-
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
| 27 |
-
*.tar.* filter=lfs diff=lfs merge=lfs -text
|
| 28 |
-
*.tar filter=lfs diff=lfs merge=lfs -text
|
| 29 |
-
*.tflite filter=lfs diff=lfs merge=lfs -text
|
| 30 |
-
*.tgz filter=lfs diff=lfs merge=lfs -text
|
| 31 |
-
*.wasm filter=lfs diff=lfs merge=lfs -text
|
| 32 |
-
*.xz filter=lfs diff=lfs merge=lfs -text
|
| 33 |
-
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
-
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
-
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
| 1 |
+
*.db filter=lfs diff=lfs merge=lfs -text
|
| 2 |
+
lfs_tmp/sales_management.db filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
.gitignore
ADDED
|
Binary file (86 Bytes). View file
|
|
|
analytics.py
ADDED
|
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
|
| 2 |
+
import numpy as np
|
| 3 |
+
from datetime import datetime, timedelta
|
| 4 |
+
|
| 5 |
+
class Analytics:
    """Read-only reporting/aggregation helpers over the sales database.

    ``db_manager`` must provide ``get_dataframe(table, query=None)``,
    ``get_pending_payments()`` and ``get_demo_conversions()``, each returning
    a pandas DataFrame.  Every public method is best-effort: on any failure it
    returns zeroed/empty defaults instead of raising, so dashboards keep
    rendering even when a table is missing or malformed.

    Fixes over the previous revision: the four duplicated default-result
    literals are now built by private factories (one source of truth, fresh
    dict per call so callers cannot corrupt shared state), and the unused
    ``as e`` bindings on the swallow-all handlers are gone.
    """

    def __init__(self, db_manager):
        self.db = db_manager

    # --- default payloads (returned on empty data or on error) -------------

    @staticmethod
    def _empty_sales_summary():
        """Zeroed result shape for get_sales_summary()."""
        return {
            'total_sales': 0,
            'total_payments': 0,
            'pending_amount': 0,
            'total_transactions': 0,
            'avg_sale_value': 0,
        }

    @staticmethod
    def _empty_customer_analysis():
        """Zeroed result shape for get_customer_analysis()."""
        return {'total_customers': 0, 'village_distribution': {}, 'top_customers': {}}

    @staticmethod
    def _empty_payment_analysis():
        """Zeroed result shape for get_payment_analysis()."""
        return {'total_pending': 0, 'customer_pending': {}, 'payment_methods': {}}

    @staticmethod
    def _empty_demo_stats():
        """Zeroed result shape for get_demo_conversion_rates()."""
        return {'total_demos': 0, 'converted_demos': 0, 'conversion_rate': 0}

    # --- public reporting API ----------------------------------------------

    def get_sales_summary(self):
        """Get comprehensive sales summary statistics."""
        try:
            sales_df = self.db.get_dataframe('sales')
            payments_df = self.db.get_dataframe('payments')

            if sales_df.empty:
                return self._empty_sales_summary()

            total_sales = sales_df['total_amount'].sum()
            total_payments = payments_df['amount'].sum() if not payments_df.empty else 0

            return {
                'total_sales': total_sales,
                'total_payments': total_payments,
                # Outstanding balance = everything invoiced minus everything received.
                'pending_amount': total_sales - total_payments,
                'total_transactions': len(sales_df),
                'avg_sale_value': sales_df['total_amount'].mean(),
            }
        except Exception:
            # Best-effort: never propagate reporting errors to the caller/UI.
            return self._empty_sales_summary()

    def get_customer_analysis(self):
        """Analyze customer data: counts, village spread, top spenders."""
        try:
            customers_df = self.db.get_dataframe('customers')
            sales_df = self.db.get_dataframe('sales')

            if customers_df.empty:
                return self._empty_customer_analysis()

            # Top-10 villages by number of customers.
            village_stats = customers_df['village'].value_counts().head(10)

            # Top-10 customers by lifetime spend.
            if not sales_df.empty:
                top_customers = sales_df.groupby('customer_id')['total_amount'].sum().nlargest(10)
            else:
                top_customers = pd.Series(dtype=float)

            return {
                'total_customers': len(customers_df),
                'village_distribution': village_stats.to_dict(),
                'top_customers': top_customers.to_dict(),
            }
        except Exception:
            return self._empty_customer_analysis()

    def get_payment_analysis(self):
        """Analyze pending balances per customer and payment-method usage."""
        try:
            pending_payments = self.db.get_pending_payments()
            payments_df = self.db.get_dataframe('payments')

            if pending_payments.empty:
                return self._empty_payment_analysis()

            # Outstanding amount grouped per customer.
            customer_pending = pending_payments.groupby('customer_id')['pending_amount'].sum()

            # How often each payment method was used.
            if not payments_df.empty:
                payment_methods = payments_df['payment_method'].value_counts()
            else:
                payment_methods = pd.Series(dtype=object)

            return {
                'total_pending': pending_payments['pending_amount'].sum(),
                'customer_pending': customer_pending.to_dict(),
                'payment_methods': payment_methods.to_dict(),
            }
        except Exception:
            return self._empty_payment_analysis()

    def get_demo_conversion_rates(self):
        """Calculate demo conversion rate (percentage of 'Converted' demos)."""
        try:
            demos_df = self.db.get_demo_conversions()

            if demos_df.empty:
                return self._empty_demo_stats()

            total_demos = len(demos_df)
            converted_demos = len(demos_df[demos_df['conversion_status'] == 'Converted'])

            return {
                'total_demos': total_demos,
                'converted_demos': converted_demos,
                'conversion_rate': (converted_demos / total_demos) * 100 if total_demos > 0 else 0,
            }
        except Exception:
            return self._empty_demo_stats()

    def get_sales_trend(self):
        """Daily sales totals (sale_date, total_amount) sorted by date, for charts.

        Returns an empty DataFrame when there are no sales or on error.
        """
        try:
            sales_df = self.db.get_dataframe('sales')
            if sales_df.empty:
                return pd.DataFrame()

            # Normalise to datetime so grouping/sorting is chronological.
            sales_df['sale_date'] = pd.to_datetime(sales_df['sale_date'])
            daily_sales = sales_df.groupby('sale_date')['total_amount'].sum().reset_index()
            return daily_sales.sort_values('sale_date')
        except Exception:
            return pd.DataFrame()

    def get_payment_distribution(self):
        """Total amount received per payment method, for charts."""
        try:
            payments_df = self.db.get_dataframe('payments')
            if payments_df.empty:
                return pd.DataFrame()
            return payments_df.groupby('payment_method')['amount'].sum().reset_index()
        except Exception:
            return pd.DataFrame()

    def get_product_performance(self):
        """Units sold and revenue per product (sale_items joined with products)."""
        try:
            sale_items_df = self.db.get_dataframe('sale_items', '''
                SELECT si.*, p.product_name
                FROM sale_items si
                JOIN products p ON si.product_id = p.product_id
            ''')

            if sale_items_df.empty:
                return pd.DataFrame()

            return sale_items_df.groupby('product_name').agg({
                'quantity': 'sum',
                'amount': 'sum',
            }).reset_index()
        except Exception:
            return pd.DataFrame()
|
automation.py
ADDED
|
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# enhanced_automation.py
import schedule
import time
import smtplib
# BUGFIX: the classes are MIMEText / MIMEMultipart (upper-case "MIME").
# The previous lower-case names (MimeText / MimeMultipart) never existed in
# email.mime and raise ImportError, killing the module on import.
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from datetime import datetime, timedelta
from analytics import Analytics
import os
|
| 10 |
+
|
| 11 |
+
class AutomationManager:
    """Scheduled outreach tasks: WhatsApp payment reminders, demo follow-ups
    and a weekly performance report.

    ``db_manager`` must expose ``get_pending_payments()``, ``get_dataframe()``
    and be usable by ``Analytics``; ``whatsapp_manager`` must expose
    ``send_message(mobile, text)``.
    """

    def __init__(self, db_manager, whatsapp_manager):
        # Database access layer.
        self.db = db_manager
        # Outbound messaging channel.
        self.whatsapp = whatsapp_manager

    def daily_payment_reminders(self):
        """Send payment reminders for overdue payments"""
        overdue_payments = self.db.get_pending_payments()

        # One WhatsApp message per invoice that still has a positive balance.
        for _, payment in overdue_payments.iterrows():
            if payment['pending_amount'] > 0:
                # NOTE(review): customer_id is interpolated directly into SQL.
                # Safe only while customer_id is always numeric — prefer a
                # parameterized query if get_dataframe supports one.
                customer = self.db.get_dataframe('customers',
                    f"SELECT * FROM customers WHERE customer_id = {payment['customer_id']}")

                if not customer.empty:
                    customer_data = customer.iloc[0]
                    message = f"""Hello {customer_data['name']},

This is a friendly reminder that your payment of ₹{payment['pending_amount']:,.2f} for invoice {payment['invoice_no']} is overdue.

Please make the payment at your earliest convenience.

Thank you,
Sales Team"""

                    self.whatsapp.send_message(customer_data['mobile'], message)

    def demo_followups(self):
        """Send follow-up messages for demos"""
        # Demos whose follow-up is due today and that have not converted yet.
        upcoming_followups = self.db.get_dataframe('demos', '''
            SELECT d.*, c.name as customer_name, c.mobile, p.product_name
            FROM demos d
            JOIN customers c ON d.customer_id = c.customer_id
            JOIN products p ON d.product_id = p.product_id
            WHERE d.follow_up_date = date('now')
            AND d.conversion_status = 'Not Converted'
        ''')

        for _, demo in upcoming_followups.iterrows():
            message = f"""Hello {demo['customer_name']},

Following up on your demo of {demo['product_name']} on {demo['demo_date']}.

How was your experience? Would you like to place an order or need another demo?

Best regards,
Sales Team"""

            self.whatsapp.send_message(demo['mobile'], message)

    def weekly_performance_report(self):
        """Generate and send weekly performance report"""
        analytics = Analytics(self.db)

        sales_summary = analytics.get_sales_summary()
        demo_stats = analytics.get_demo_conversion_rates()
        # NOTE(review): payment_analysis is computed but not used in the
        # report below — presumably intended for a future section.
        payment_analysis = analytics.get_payment_analysis()

        report = f"""
📊 WEEKLY PERFORMANCE REPORT
----------------------------
Total Sales: ₹{sales_summary.get('total_sales', 0):,.2f}
Pending Payments: ₹{sales_summary.get('pending_amount', 0):,.2f}
Demo Conversion Rate: {demo_stats.get('conversion_rate', 0):.1f}%
Total Customers: {analytics.get_customer_analysis().get('total_customers', 0)}

Generated on: {datetime.now().strftime('%Y-%m-%d %H:%M')}
"""

        # You can extend this to email the report
        self._save_report(report)
        return report

    def _save_report(self, report):
        """Save report to file"""
        # One file per calendar day; a re-run on the same day overwrites it.
        filename = f"reports/weekly_report_{datetime.now().strftime('%Y%m%d')}.txt"
        os.makedirs('reports', exist_ok=True)

        with open(filename, 'w') as f:
            f.write(report)
|
data_processor.py
ADDED
|
@@ -0,0 +1,710 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import pandas as pd
import numpy as np
import os
import re
from datetime import datetime
import logging

# Module-wide logger.  basicConfig only takes effect if the host application
# has not already configured the root logger.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
| 11 |
+
|
| 12 |
+
class DataProcessor:
|
| 13 |
+
    def __init__(self, db_manager):
        # Database access layer (expects get_dataframe / execute_query / add_sale /
        # add_customer — see the methods below for the exact surface used).
        self.db = db_manager
        # Upper-cased product name -> product_id lookup, built once at start-up.
        self.product_mapping = self._create_product_mapping()
|
| 16 |
+
|
| 17 |
+
def _create_product_mapping(self):
|
| 18 |
+
"""Create product mapping from database"""
|
| 19 |
+
try:
|
| 20 |
+
products_df = self.db.get_dataframe('products')
|
| 21 |
+
return {row['product_name'].upper(): row['product_id'] for _, row in products_df.iterrows()}
|
| 22 |
+
except Exception as e:
|
| 23 |
+
logger.error(f"Error creating product mapping: {e}")
|
| 24 |
+
return {}
|
| 25 |
+
|
| 26 |
+
    def process_excel_file(self, file_path):
        """Enhanced file processing with all data types.

        Reads every sheet of the workbook, classifies it (payment / sales /
        customer / distributor) and routes it to the matching processor.

        Returns:
            bool: True when at least one sheet was processed successfully.
        """
        try:
            file_name = os.path.basename(file_path)
            print(f"🚀 Processing file: {file_name}")

            excel_file = pd.ExcelFile(file_path)
            processed_sheets = 0

            for sheet_name in excel_file.sheet_names:
                df = pd.read_excel(file_path, sheet_name=sheet_name)
                df_clean = self._clean_dataframe(df)

                print(f"\n📊 Sheet: {sheet_name}")
                print(f" Columns: {df_clean.columns.tolist()}")

                # Check all types with priority
                is_payment = self._is_payment_sheet(df_clean)
                is_sales = self._is_sales_sheet(df_clean)
                is_customer = self._is_customer_sheet(df_clean)
                is_distributor = self._is_distributor_sheet(df_clean)

                print(f" Detection - Payment: {is_payment}, Sales: {is_sales}, Customer: {is_customer}, Distributor: {is_distributor}")

                processed = False
                # Priority order: payment > sales > distributor > customer, so a
                # sheet matching several heuristics goes to the more specific handler.
                if is_payment:
                    processed = self.process_payment_sheet(df_clean, file_name, sheet_name)
                elif is_sales:
                    processed = self.process_sales_sheet(df_clean, file_name, sheet_name)
                elif is_distributor:
                    processed = self.process_distributor_sheet(df_clean, file_name, sheet_name)
                elif is_customer:
                    processed = self.process_customer_sheet(df_clean, file_name, sheet_name)

                if processed:
                    processed_sheets += 1
                    print(f" ✅ Successfully processed as detected type")
                else:
                    print(f" ❌ Failed to process")

            print(f"\n🎉 File processing complete: {processed_sheets}/{len(excel_file.sheet_names)} sheets processed")
            return processed_sheets > 0

        except Exception as e:
            print(f"💥 Error processing file {file_path}: {e}")
            return False
|
| 72 |
+
|
| 73 |
+
def _clean_dataframe(self, df):
|
| 74 |
+
"""Clean and prepare dataframe for processing"""
|
| 75 |
+
# Remove completely empty rows and columns
|
| 76 |
+
df = df.dropna(how='all').dropna(axis=1, how='all')
|
| 77 |
+
|
| 78 |
+
# Reset index
|
| 79 |
+
df = df.reset_index(drop=True)
|
| 80 |
+
|
| 81 |
+
# Convert column names to string and clean them
|
| 82 |
+
df.columns = [str(col).strip().upper() for col in df.columns]
|
| 83 |
+
|
| 84 |
+
return df
|
| 85 |
+
|
| 86 |
+
def _is_sales_sheet(self, df):
|
| 87 |
+
"""Check if sheet contains sales data"""
|
| 88 |
+
required_columns = ['INVOICE', 'CUSTOMER', 'PRODUCT', 'QUANTITY', 'AMOUNT']
|
| 89 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 90 |
+
return len(existing_columns) >= 3
|
| 91 |
+
|
| 92 |
+
def _is_customer_sheet(self, df):
|
| 93 |
+
"""Check if sheet contains customer data"""
|
| 94 |
+
required_columns = ['CUSTOMER', 'NAME', 'MOBILE', 'VILLAGE']
|
| 95 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 96 |
+
return len(existing_columns) >= 2
|
| 97 |
+
|
| 98 |
+
def _is_distributor_sheet(self, df):
|
| 99 |
+
"""Check if sheet contains distributor data"""
|
| 100 |
+
required_columns = ['DISTRIBUTOR', 'MANTRI', 'SABHASAD']
|
| 101 |
+
existing_columns = [col for col in df.columns if any(req in col for req in required_columns)]
|
| 102 |
+
return len(existing_columns) >= 2
|
| 103 |
+
|
| 104 |
+
    def process_sales_sheet(self, df, file_name, sheet_name):
        """Process sales data from sheet.

        Assumes positional columns: 0=invoice, 1=customer, 2=product,
        3=quantity, 4=amount.  Creates missing customers on the fly and
        records one sale (single line item) per valid row.

        Returns:
            bool: True when at least one row was imported.
        """
        try:
            processed_rows = 0

            for index, row in df.iterrows():
                try:
                    # Skip header rows and empty rows
                    if self._is_header_row(row) or pd.isna(row.iloc[0]):
                        continue

                    # Extract sales data (adjust column indices based on your Excel structure)
                    invoice_no = str(row.iloc[0]) if len(row) > 0 else f"INV_{datetime.now().strftime('%Y%m%d%H%M%S')}_{index}"
                    customer_name = str(row.iloc[1]) if len(row) > 1 else "Unknown Customer"
                    product_name = str(row.iloc[2]) if len(row) > 2 else "Unknown Product"
                    quantity = self._safe_float(row.iloc[3]) if len(row) > 3 else 0
                    amount = self._safe_float(row.iloc[4]) if len(row) > 4 else 0

                    # Get or create customer (sales sheets carry no contact/location data)
                    customer_id = self._get_or_create_customer(customer_name, "", "", "", "")

                    # Get product ID
                    product_id = self._get_product_id(product_name)

                    if customer_id and product_id and quantity > 0:
                        # Create sale
                        sale_date = datetime.now().date()
                        # Rate is derived from total amount; guarded against divide-by-zero.
                        sale_items = [{
                            'product_id': product_id,
                            'quantity': quantity,
                            'rate': amount / quantity if quantity > 0 else 0
                        }]

                        self.db.add_sale(invoice_no, customer_id, sale_date, sale_items)
                        processed_rows += 1

                except Exception as e:
                    # Per-row failures are logged and skipped so one bad row
                    # does not abort the whole sheet.
                    logger.warning(f"Error processing row {index} in sales sheet: {e}")
                    continue

            logger.info(f"Processed {processed_rows} sales from {sheet_name}")
            return processed_rows > 0

        except Exception as e:
            logger.error(f"Error processing sales sheet: {e}")
            return False
|
| 150 |
+
|
| 151 |
+
    def process_customer_sheet(self, df, file_name, sheet_name):
        """Process customer data from sheet with duplicate handling.

        Assumes positional columns: 0=customer code, 1=name, 2=mobile,
        3=village, 4=taluka, 5=district.  A "Name (Village)" pattern in the
        name cell is split when the village column is empty.

        Returns:
            bool: True when at least one customer was added.
        """
        try:
            processed_rows = 0
            duplicate_rows = 0
            error_rows = 0

            print(f"🔄 Processing customer sheet: {sheet_name} with {len(df)} rows")

            for index, row in df.iterrows():
                try:
                    # Skip header rows and empty rows
                    if self._is_header_row(row) or pd.isna(row.iloc[0]):
                        continue

                    # Extract customer data
                    customer_code = str(row.iloc[0]) if len(row) > 0 and pd.notna(row.iloc[0]) else None
                    name = str(row.iloc[1]) if len(row) > 1 and pd.notna(row.iloc[1]) else "Unknown"
                    mobile = str(row.iloc[2]) if len(row) > 2 and pd.notna(row.iloc[2]) else ""

                    # Extract location - adjust indices based on your Excel structure
                    village = str(row.iloc[3]) if len(row) > 3 and pd.notna(row.iloc[3]) else ""
                    taluka = str(row.iloc[4]) if len(row) > 4 and pd.notna(row.iloc[4]) else ""
                    district = str(row.iloc[5]) if len(row) > 5 and pd.notna(row.iloc[5]) else ""

                    # If village is combined with name, split them
                    if not village and "(" in name:
                        name_parts = name.split("(")
                        if len(name_parts) > 1:
                            name = name_parts[0].strip()
                            village = name_parts[1].replace(")", "").strip()

                    # Skip if no name
                    if not name or name == "Unknown":
                        continue

                    # Add customer to database (method now handles duplicates)
                    customer_id = self.db.add_customer(name, mobile, village, taluka, district, customer_code)

                    # add_customer signals a duplicate with a falsy id or -1.
                    if customer_id and customer_id != -1:
                        processed_rows += 1
                        if processed_rows % 50 == 0:  # Progress update
                            print(f"📊 Processed {processed_rows} customers...")
                    else:
                        duplicate_rows += 1

                except Exception as e:
                    error_rows += 1
                    if error_rows <= 5:  # Only log first few errors
                        print(f"❌ Error in row {index}: {e}")
                    continue

            print(f"🎉 Customer processing complete: {processed_rows} added, {duplicate_rows} duplicates, {error_rows} errors")
            return processed_rows > 0

        except Exception as e:
            print(f"💥 Error processing customer sheet: {e}")
            return False
|
| 209 |
+
|
| 210 |
+
    def process_distributor_sheet(self, df, file_name, sheet_name):
        """Process distributor data from sheet.

        Maps the spreadsheet columns (Village/Taluka/District/Mantri_Name/
        Mantri_Mobile/Sabhasad/Contact_In_Group, with positional fallbacks)
        into the distributors table via INSERT OR REPLACE (parameterized).

        Returns:
            bool: True when at least one distributor row was stored.
        """
        try:
            processed_rows = 0

            # Clean the dataframe - convert column names to consistent format
            df.columns = [str(col).strip().upper() for col in df.columns]
            print(f"DEBUG: Processing distributor sheet with columns: {df.columns.tolist()}")

            for index, row in df.iterrows():
                try:
                    # Skip header rows and empty rows
                    if self._is_header_row(row) or pd.isna(row.iloc[0]):
                        print(f"DEBUG: Skipping row {index} - header or empty")
                        continue

                    print(f"DEBUG: Processing row {index}")

                    # Extract distributor data based on YOUR ACTUAL COLUMNS
                    # Map your Excel columns to database fields
                    # NOTE(review): headers were just upper-cased above, so the
                    # mixed-case names below likely miss and fall back to the
                    # positional index inside _safe_get — confirm intended.
                    name = self._extract_distributor_name(row)  # We'll use Village + Taluka as name
                    village = self._safe_get(row, 'Village', 1)
                    taluka = self._safe_get(row, 'Taluka', 2)
                    district = self._safe_get(row, 'District', 3)
                    mantri_name = self._safe_get(row, 'Mantri_Name', 4)
                    mantri_mobile = self._safe_get(row, 'Mantri_Mobile', 5)
                    sabhasad_count = self._safe_get_int(row, 'Sabhasad', 6)
                    contact_in_group = self._safe_get_int(row, 'Contact_In_Group', 7)

                    print(f"DEBUG: Extracted - Village: {village}, Taluka: {taluka}, Mantri: {mantri_name}")

                    # Validate we have essential data
                    if not village or not taluka:
                        print(f"DEBUG: Skipping - missing village or taluka")
                        continue

                    # Create distributor name from village + taluka
                    if not name:
                        name = f"{village} - {taluka}"

                    # Add distributor to database with ALL fields
                    self.db.execute_query('''
                        INSERT OR REPLACE INTO distributors
                        (name, village, taluka, district, mantri_name, mantri_mobile, sabhasad_count, contact_in_group)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?)
                    ''', (name, village, taluka, district, mantri_name, mantri_mobile, sabhasad_count, contact_in_group))

                    processed_rows += 1
                    print(f"DEBUG: Successfully added distributor: {name}")

                except Exception as e:
                    logger.warning(f"Error processing row {index} in distributor sheet: {e}")
                    continue

            logger.info(f"Processed {processed_rows} distributors from {sheet_name}")
            return processed_rows > 0

        except Exception as e:
            logger.error(f"Error processing distributor sheet: {e}")
            return False
|
| 270 |
+
|
| 271 |
+
def _extract_distributor_name(self, row):
|
| 272 |
+
"""Extract distributor name from village and taluka"""
|
| 273 |
+
village = self._safe_get(row, 'Village', 1)
|
| 274 |
+
taluka = self._safe_get(row, 'Taluka', 2)
|
| 275 |
+
|
| 276 |
+
if village and taluka:
|
| 277 |
+
return f"{village} - {taluka}"
|
| 278 |
+
elif village:
|
| 279 |
+
return village
|
| 280 |
+
elif taluka:
|
| 281 |
+
return taluka
|
| 282 |
+
else:
|
| 283 |
+
return "Unknown Distributor"
|
| 284 |
+
|
| 285 |
+
def _safe_get(self, row, column_name, default_index):
|
| 286 |
+
"""Safely get value from row by column name or index"""
|
| 287 |
+
try:
|
| 288 |
+
# Try by column name first
|
| 289 |
+
if column_name in row.index:
|
| 290 |
+
value = row[column_name]
|
| 291 |
+
if pd.isna(value):
|
| 292 |
+
return ""
|
| 293 |
+
return str(value).strip()
|
| 294 |
+
|
| 295 |
+
# Fallback to index
|
| 296 |
+
if len(row) > default_index:
|
| 297 |
+
value = row.iloc[default_index]
|
| 298 |
+
if pd.isna(value):
|
| 299 |
+
return ""
|
| 300 |
+
return str(value).strip()
|
| 301 |
+
|
| 302 |
+
return ""
|
| 303 |
+
except Exception:
|
| 304 |
+
return ""
|
| 305 |
+
|
| 306 |
+
def _safe_get_int(self, row, column_name, default_index):
|
| 307 |
+
"""Safely get integer value from row"""
|
| 308 |
+
try:
|
| 309 |
+
str_value = self._safe_get(row, column_name, default_index)
|
| 310 |
+
if str_value and str_value.strip():
|
| 311 |
+
return int(float(str_value)) # Handle both int and float strings
|
| 312 |
+
return 0
|
| 313 |
+
except (ValueError, TypeError):
|
| 314 |
+
return 0
|
| 315 |
+
|
| 316 |
+
def _is_header_row(self, row):
|
| 317 |
+
"""Check if row is a header row - updated for your data"""
|
| 318 |
+
if len(row) == 0:
|
| 319 |
+
return True
|
| 320 |
+
|
| 321 |
+
first_value = str(row.iloc[0]) if pd.notna(row.iloc[0]) else ""
|
| 322 |
+
first_value_upper = first_value.upper()
|
| 323 |
+
|
| 324 |
+
# Header indicators for YOUR data
|
| 325 |
+
header_indicators = [
|
| 326 |
+
'DATE', 'VILLAGE', 'TALUKA', 'DISTRICT', 'MANTRI',
|
| 327 |
+
'SABHASAD', 'CONTACT', 'TOTAL', 'SR', 'NO', 'NAME'
|
| 328 |
+
]
|
| 329 |
+
|
| 330 |
+
# If first value contains any header indicator, it's likely a header
|
| 331 |
+
return any(indicator in first_value_upper for indicator in header_indicators)
|
| 332 |
+
|
| 333 |
+
def _safe_float(self, value):
|
| 334 |
+
"""Safely convert value to float"""
|
| 335 |
+
try:
|
| 336 |
+
if pd.isna(value):
|
| 337 |
+
return 0.0
|
| 338 |
+
return float(value)
|
| 339 |
+
except (ValueError, TypeError):
|
| 340 |
+
return 0.0
|
| 341 |
+
|
| 342 |
+
def _get_or_create_customer(self, name, mobile, village, taluka, district):
|
| 343 |
+
"""Get existing customer or create new one"""
|
| 344 |
+
try:
|
| 345 |
+
# Check if customer exists
|
| 346 |
+
result = self.db.execute_query(
|
| 347 |
+
'SELECT customer_id FROM customers WHERE name = ? AND mobile = ?',
|
| 348 |
+
(name, mobile)
|
| 349 |
+
)
|
| 350 |
+
|
| 351 |
+
if result:
|
| 352 |
+
return result[0][0]
|
| 353 |
+
else:
|
| 354 |
+
# Create new customer
|
| 355 |
+
customer_code = f"CUST_{datetime.now().strftime('%Y%m%d%H%M%S')}"
|
| 356 |
+
self.db.add_customer(name, mobile, village, taluka, district, customer_code)
|
| 357 |
+
|
| 358 |
+
# Get the new customer ID
|
| 359 |
+
result = self.db.execute_query(
|
| 360 |
+
'SELECT customer_id FROM customers WHERE customer_code = ?',
|
| 361 |
+
(customer_code,)
|
| 362 |
+
)
|
| 363 |
+
return result[0][0] if result else None
|
| 364 |
+
|
| 365 |
+
except Exception as e:
|
| 366 |
+
logger.error(f"Error getting/creating customer: {e}")
|
| 367 |
+
return None
|
| 368 |
+
|
| 369 |
+
def _get_product_id(self, product_name):
|
| 370 |
+
"""Get product ID from product name"""
|
| 371 |
+
clean_name = product_name.upper().strip()
|
| 372 |
+
return self.product_mapping.get(clean_name, None)
|
| 373 |
+
|
| 374 |
+
def _extract_location_from_name(self, name):
|
| 375 |
+
"""Extract village and taluka from customer name"""
|
| 376 |
+
name_upper = name.upper()
|
| 377 |
+
|
| 378 |
+
locations = {
|
| 379 |
+
'AMIYAD': ('Amiyad', ''),
|
| 380 |
+
'AMVAD': ('Amvad', ''),
|
| 381 |
+
'ANKALAV': ('', 'Ankalav'),
|
| 382 |
+
'PETLAD': ('', 'Petlad'),
|
| 383 |
+
'BORSAD': ('', 'Borsad'),
|
| 384 |
+
'VADODARA': ('', 'Vadodara'),
|
| 385 |
+
'ANAND': ('', 'Anand'),
|
| 386 |
+
'NADIAD': ('', 'Nadiad')
|
| 387 |
+
}
|
| 388 |
+
|
| 389 |
+
village, taluka = "", ""
|
| 390 |
+
for location, (v, t) in locations.items():
|
| 391 |
+
if location in name_upper:
|
| 392 |
+
if v:
|
| 393 |
+
village = v
|
| 394 |
+
if t:
|
| 395 |
+
taluka = t
|
| 396 |
+
break
|
| 397 |
+
|
| 398 |
+
return village, taluka
|
| 399 |
+
|
| 400 |
+
|
| 401 |
+
|
| 402 |
+
# Add to DataProcessor class in data_processor.py
|
| 403 |
+
|
| 404 |
+
def _is_sales_sheet(self, df):
|
| 405 |
+
"""Enhanced sales sheet detection with better logging"""
|
| 406 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 407 |
+
|
| 408 |
+
print(f"\n🔍 ENHANCED SALES DETECTION:")
|
| 409 |
+
print(f" All columns: {columns_lower}")
|
| 410 |
+
|
| 411 |
+
sales_indicators = [
|
| 412 |
+
'invoice', 'sale', 'amount', 'product', 'quantity', 'rate',
|
| 413 |
+
'total', 'price', 'bill', 'payment', 'item', 'qty'
|
| 414 |
+
]
|
| 415 |
+
|
| 416 |
+
found_indicators = []
|
| 417 |
+
for indicator in sales_indicators:
|
| 418 |
+
matching_cols = [col for col in columns_lower if indicator in col]
|
| 419 |
+
if matching_cols:
|
| 420 |
+
found_indicators.append((indicator, matching_cols))
|
| 421 |
+
|
| 422 |
+
print(f" Found sales indicators: {found_indicators}")
|
| 423 |
+
|
| 424 |
+
score = len(found_indicators)
|
| 425 |
+
print(f" Sales detection score: {score}")
|
| 426 |
+
|
| 427 |
+
return score >= 2
|
| 428 |
+
|
| 429 |
+
    def process_sales_sheet(self, df, file_name, sheet_name):
        """Enhanced sales data processing with better logging"""
        # Walks every row of the sheet, extracts invoice/customer/product/
        # quantity/amount via flexible column matching, validates them, resolves
        # customer and product IDs, and stores one single-item sale per row
        # through self.db.add_sale. Returns True when at least one row was
        # stored; row-level failures are printed and skipped.
        try:
            processed_rows = 0
            print(f"🔄 Processing sales sheet: {sheet_name} with {len(df)} rows")

            for index, row in df.iterrows():
                try:
                    # Skip header rows and empty rows
                    if self._is_header_row(row) or pd.isna(row.iloc[0]):
                        continue

                    print(f"🔧 Processing row {index}")

                    # Extract sales data with flexible column mapping.
                    # The invoice default is a timestamped placeholder that is
                    # replaced below by a proper generated number.
                    invoice_no = self._extract_sales_value(row, 'invoice', 0, f"INV_{datetime.now().strftime('%Y%m%d%H%M%S')}_{index}")
                    customer_name = self._extract_sales_value(row, 'customer', 1, "Unknown Customer")
                    product_name = self._extract_sales_value(row, 'product', 2, "Unknown Product")
                    quantity = self._safe_float(self._extract_sales_value(row, 'quantity', 3, 0))
                    amount = self._safe_float(self._extract_sales_value(row, 'amount', 4, 0))

                    print(f" Extracted - Invoice: '{invoice_no}', Customer: '{customer_name}', Product: '{product_name}', Qty: {quantity}, Amount: {amount}")

                    # Validate essential data: rows without a real customer or
                    # with non-positive quantity/amount are skipped, not stored.
                    if not customer_name or customer_name == "Unknown Customer":
                        print(f" ⚠️ Skipping - invalid customer name")
                        continue

                    if quantity <= 0:
                        print(f" ⚠️ Skipping - invalid quantity: {quantity}")
                        continue

                    if amount <= 0:
                        print(f" ⚠️ Skipping - invalid amount: {amount}")
                        continue

                    # Get or create customer (location fields unknown here).
                    customer_id = self._get_or_create_customer(customer_name, "", "", "", "")
                    if not customer_id:
                        print(f" ⚠️ Skipping - could not get/create customer")
                        continue

                    # Get product ID from the preloaded name -> id mapping.
                    product_id = self._get_product_id(product_name)
                    if not product_id:
                        print(f" ⚠️ Skipping - product not found: '{product_name}'")
                        print(f" Available products: {list(self.product_mapping.keys())}")
                        continue

                    # Calculate rate (amount is the row total, not a unit price).
                    rate = amount / quantity if quantity > 0 else 0

                    # Create sale items — the sheet has no date column, so the
                    # import date is used as the sale date.
                    sale_date = datetime.now().date()
                    sale_items = [{
                        'product_id': product_id,
                        'quantity': quantity,
                        'rate': rate
                    }]

                    # Generate proper invoice number when the sheet had none
                    # (the placeholder default always starts with 'INV_').
                    if not invoice_no or invoice_no.startswith('INV_'):
                        invoice_no = self.db.generate_invoice_number()

                    print(f" Creating sale - Customer ID: {customer_id}, Product ID: {product_id}")

                    # Add sale to database; add_sale is expected to return the
                    # new sale_id (positive) on success.
                    sale_id = self.db.add_sale(invoice_no, customer_id, sale_date, sale_items)

                    if sale_id and sale_id > 0:
                        processed_rows += 1
                        print(f" ✅ Successfully created sale ID: {sale_id}")
                    else:
                        print(f" ❌ Failed to create sale")

                except Exception as e:
                    # Per-row failures must not abort the rest of the sheet.
                    print(f" ❌ Error in row {index}: {e}")
                    import traceback
                    traceback.print_exc()
                    continue

            print(f"🎉 Processed {processed_rows} sales from {sheet_name}")
            return processed_rows > 0

        except Exception as e:
            print(f"💥 Error processing sales sheet: {e}")
            import traceback
            traceback.print_exc()
            return False
|
| 518 |
+
def _extract_sales_value(self, row, field_name, default_index, default_value):
|
| 519 |
+
"""Extract sales values with flexible column matching"""
|
| 520 |
+
# Try to find column by name
|
| 521 |
+
for col_name in row.index:
|
| 522 |
+
if field_name in str(col_name).lower():
|
| 523 |
+
value = row[col_name]
|
| 524 |
+
if pd.notna(value):
|
| 525 |
+
return str(value).strip()
|
| 526 |
+
|
| 527 |
+
# Fallback to index
|
| 528 |
+
if len(row) > default_index:
|
| 529 |
+
value = row.iloc[default_index]
|
| 530 |
+
if pd.notna(value):
|
| 531 |
+
return str(value).strip()
|
| 532 |
+
|
| 533 |
+
return default_value
|
| 534 |
+
|
| 535 |
+
# Add to DataProcessor class
|
| 536 |
+
|
| 537 |
+
def _is_payment_sheet(self, df):
|
| 538 |
+
"""Detect payment sheets"""
|
| 539 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 540 |
+
|
| 541 |
+
payment_indicators = [
|
| 542 |
+
'payment', 'paid', 'amount', 'invoice', 'date', 'method',
|
| 543 |
+
'cash', 'gpay', 'cheque', 'bank', 'rrn', 'reference'
|
| 544 |
+
]
|
| 545 |
+
|
| 546 |
+
score = sum(1 for indicator in payment_indicators
|
| 547 |
+
if any(indicator in col for col in columns_lower))
|
| 548 |
+
|
| 549 |
+
print(f"🔍 Payment detection - Score: {score}, Columns: {columns_lower}")
|
| 550 |
+
return score >= 2
|
| 551 |
+
|
| 552 |
+
    def process_payment_sheet(self, df, file_name, sheet_name):
        """Process payment data from sheet"""
        # For each row: extract invoice/amount/date/method, look up the sale by
        # invoice number, and insert a payments record linked to it. Rows whose
        # invoice is unknown are silently skipped. Returns True when at least
        # one payment was stored.
        try:
            processed_rows = 0
            print(f"🔄 Processing payment sheet: {sheet_name}")

            for index, row in df.iterrows():
                try:
                    if self._is_header_row(row) or pd.isna(row.iloc[0]):
                        continue

                    # Extract payment data.
                    # NOTE(review): payment_date is a string when read from the
                    # sheet but a datetime.date when defaulted — SQLite accepts
                    # both, so stored formats may be inconsistent; confirm.
                    invoice_no = self._extract_sales_value(row, 'invoice', 0, "")
                    amount = self._safe_float(self._extract_sales_value(row, 'amount', 1, 0))
                    payment_date = self._extract_sales_value(row, 'date', 2, datetime.now().date())
                    payment_method = self._extract_sales_value(row, 'method', 3, "Cash")

                    if invoice_no and amount > 0:
                        # Find sale by invoice number (log_action=False keeps
                        # this lookup out of the action log).
                        sale_result = self.db.execute_query(
                            'SELECT sale_id FROM sales WHERE invoice_no = ?',
                            (invoice_no,),
                            log_action=False
                        )

                        if sale_result:
                            sale_id = sale_result[0][0]

                            # Add payment linked to the matched sale.
                            self.db.execute_query('''
                                INSERT INTO payments (sale_id, payment_date, payment_method, amount)
                                VALUES (?, ?, ?, ?)
                            ''', (sale_id, payment_date, payment_method, amount))

                            processed_rows += 1
                            print(f"✅ Processed payment for invoice {invoice_no}")

                except Exception as e:
                    # Per-row failures must not abort the rest of the sheet.
                    print(f"❌ Error processing payment row {index}: {e}")
                    continue

            print(f"🎉 Processed {processed_rows} payments from {sheet_name}")
            return processed_rows > 0

        except Exception as e:
            print(f"💥 Error processing payment sheet: {e}")
            return False
+
|
| 600 |
+
|
| 601 |
+
def _is_customer_sheet(self, df):
|
| 602 |
+
"""Check if sheet contains customer data - IMPROVED"""
|
| 603 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 604 |
+
|
| 605 |
+
customer_indicators = [
|
| 606 |
+
'customer', 'name', 'mobile', 'phone', 'village', 'taluka',
|
| 607 |
+
'district', 'code', 'contact'
|
| 608 |
+
]
|
| 609 |
+
|
| 610 |
+
score = sum(1 for indicator in customer_indicators
|
| 611 |
+
if any(indicator in col for col in columns_lower))
|
| 612 |
+
|
| 613 |
+
print(f"🔍 Customer sheet detection - Score: {score}, Columns: {columns_lower}")
|
| 614 |
+
return score >= 2
|
| 615 |
+
|
| 616 |
+
def _is_distributor_sheet(self, df):
|
| 617 |
+
"""Enhanced distributor sheet detection with better logging"""
|
| 618 |
+
columns_lower = [str(col).lower() for col in df.columns]
|
| 619 |
+
|
| 620 |
+
print(f"\n🔍 ENHANCED DISTRIBUTOR DETECTION:")
|
| 621 |
+
print(f" All columns: {columns_lower}")
|
| 622 |
+
|
| 623 |
+
distributor_indicators = [
|
| 624 |
+
'distributor', 'mantri', 'sabhasad', 'contact_in_group',
|
| 625 |
+
'village', 'taluka', 'district', 'leader', 'team', 'sabh'
|
| 626 |
+
]
|
| 627 |
+
|
| 628 |
+
found_indicators = []
|
| 629 |
+
for indicator in distributor_indicators:
|
| 630 |
+
matching_cols = [col for col in columns_lower if indicator in col]
|
| 631 |
+
if matching_cols:
|
| 632 |
+
found_indicators.append((indicator, matching_cols))
|
| 633 |
+
|
| 634 |
+
print(f" Found indicators: {found_indicators}")
|
| 635 |
+
|
| 636 |
+
score = len(found_indicators)
|
| 637 |
+
print(f" Detection score: {score}")
|
| 638 |
+
|
| 639 |
+
# More flexible detection - lower threshold
|
| 640 |
+
return score >= 1 # Even if we find just one indicator, try processing
|
| 641 |
+
|
| 642 |
+
def process_single_sheet(self, df, sheet_name, file_name):
|
| 643 |
+
"""Process a single sheet with detailed logging"""
|
| 644 |
+
print(f"🔄 Processing sheet: {sheet_name} from {file_name}")
|
| 645 |
+
|
| 646 |
+
if self._is_sales_sheet(df):
|
| 647 |
+
print("✅ Detected as SALES sheet")
|
| 648 |
+
return self.process_sales_sheet(df, file_name, sheet_name)
|
| 649 |
+
elif self._is_customer_sheet(df):
|
| 650 |
+
print("✅ Detected as CUSTOMER sheet")
|
| 651 |
+
return self.process_customer_sheet(df, file_name, sheet_name)
|
| 652 |
+
elif self._is_distributor_sheet(df):
|
| 653 |
+
print("✅ Detected as DISTRIBUTOR sheet")
|
| 654 |
+
return self.process_distributor_sheet(df, file_name, sheet_name)
|
| 655 |
+
else:
|
| 656 |
+
print("❓ Unknown sheet type - trying customer processing as fallback")
|
| 657 |
+
return self.process_customer_sheet(df, file_name, sheet_name)
|
| 658 |
+
|
| 659 |
+
def process_excel_file(self, file_path):
|
| 660 |
+
"""Enhanced file processing with all data types"""
|
| 661 |
+
try:
|
| 662 |
+
file_name = os.path.basename(file_path)
|
| 663 |
+
print(f"🚀 Processing file: {file_name}")
|
| 664 |
+
|
| 665 |
+
excel_file = pd.ExcelFile(file_path)
|
| 666 |
+
processed_sheets = 0
|
| 667 |
+
|
| 668 |
+
for sheet_name in excel_file.sheet_names:
|
| 669 |
+
df = pd.read_excel(file_path, sheet_name=sheet_name)
|
| 670 |
+
df_clean = self._clean_dataframe(df)
|
| 671 |
+
|
| 672 |
+
print(f"\n📊 Sheet: {sheet_name}")
|
| 673 |
+
print(f" Columns: {df_clean.columns.tolist()}")
|
| 674 |
+
|
| 675 |
+
# Check all types with priority
|
| 676 |
+
is_payment = self._is_payment_sheet(df_clean)
|
| 677 |
+
is_sales = self._is_sales_sheet(df_clean)
|
| 678 |
+
is_customer = self._is_customer_sheet(df_clean)
|
| 679 |
+
is_distributor = self._is_distributor_sheet(df_clean)
|
| 680 |
+
|
| 681 |
+
print(f" Detection - Payment: {is_payment}, Sales: {is_sales}, Customer: {is_customer}, Distributor: {is_distributor}")
|
| 682 |
+
|
| 683 |
+
processed = False
|
| 684 |
+
if is_payment:
|
| 685 |
+
print(" 💳 Processing as PAYMENT sheet")
|
| 686 |
+
processed = self.process_payment_sheet(df_clean, file_name, sheet_name)
|
| 687 |
+
elif is_sales:
|
| 688 |
+
print(" 💰 Processing as SALES sheet")
|
| 689 |
+
processed = self.process_sales_sheet(df_clean, file_name, sheet_name)
|
| 690 |
+
elif is_distributor:
|
| 691 |
+
print(" 🤝 Processing as DISTRIBUTOR sheet")
|
| 692 |
+
processed = self.process_distributor_sheet(df_clean, file_name, sheet_name)
|
| 693 |
+
elif is_customer:
|
| 694 |
+
print(" 👥 Processing as CUSTOMER sheet")
|
| 695 |
+
processed = self.process_customer_sheet(df_clean, file_name, sheet_name)
|
| 696 |
+
else:
|
| 697 |
+
print(" ❓ Unknown sheet type")
|
| 698 |
+
|
| 699 |
+
if processed:
|
| 700 |
+
processed_sheets += 1
|
| 701 |
+
print(f" ✅ Successfully processed")
|
| 702 |
+
else:
|
| 703 |
+
print(f" ❌ Failed to process")
|
| 704 |
+
|
| 705 |
+
print(f"\n🎉 File processing complete: {processed_sheets}/{len(excel_file.sheet_names)} sheets processed")
|
| 706 |
+
return processed_sheets > 0
|
| 707 |
+
|
| 708 |
+
except Exception as e:
|
| 709 |
+
print(f"💥 Error processing file {file_path}: {e}")
|
| 710 |
+
return False
|
database.py
ADDED
|
@@ -0,0 +1,1106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Imports for the SQLite-backed data layer (stdlib + pandas).
import sqlite3
import pandas as pd
import os
import logging
from datetime import datetime, timedelta
from typing import List, Dict, Any, Optional
import random

# Set up logging
# NOTE(review): basicConfig at import time configures the root logger for the
# whole process — fine for a script, surprising when imported as a library.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
|
| 13 |
+
|
| 14 |
+
class DatabaseManager:
|
| 15 |
+
    def __init__(self, db_path="sales_management.db"):
        """Create the manager and ensure the schema exists at *db_path*."""
        self.db_path = db_path  # path to the SQLite database file
        self._is_logging = False  # Prevent recursion
        # Builds tables, seeds defaults, migrates columns, creates indexes.
        self.init_database()
+
|
| 20 |
+
def get_connection(self):
|
| 21 |
+
"""Get database connection with error handling"""
|
| 22 |
+
try:
|
| 23 |
+
conn = sqlite3.connect(self.db_path)
|
| 24 |
+
conn.row_factory = sqlite3.Row # This enables column access by name
|
| 25 |
+
return conn
|
| 26 |
+
except sqlite3.Error as e:
|
| 27 |
+
logger.error(f"Database connection error: {e}")
|
| 28 |
+
raise
|
| 29 |
+
|
| 30 |
+
    def init_database(self):
        """Initialize database with all tables and relationships.

        Every table uses CREATE TABLE IF NOT EXISTS, so the call is idempotent.
        After the schema exists, default data, column migrations and indexes
        are applied via helper methods. Raises sqlite3.Error on DDL failure.
        """
        conn = self.get_connection()

        try:
            # Customers table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS customers (
                    customer_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    customer_code TEXT UNIQUE,
                    name TEXT NOT NULL,
                    mobile TEXT,
                    village TEXT,
                    taluka TEXT,
                    district TEXT,
                    status TEXT DEFAULT 'Active',
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Distributors table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS distributors (
                    distributor_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    name TEXT NOT NULL,
                    village TEXT,
                    taluka TEXT,
                    district TEXT,
                    mantri_name TEXT,
                    mantri_mobile TEXT,
                    sabhasad_count INTEGER DEFAULT 0,
                    contact_in_group INTEGER DEFAULT 0,
                    status TEXT DEFAULT 'Active',
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Products table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS products (
                    product_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    product_name TEXT UNIQUE NOT NULL,
                    packing_type TEXT,
                    capacity_ltr REAL,
                    category TEXT,
                    standard_rate REAL,
                    is_active INTEGER DEFAULT 1,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            # Sales table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS sales (
                    sale_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    invoice_no TEXT UNIQUE NOT NULL,
                    customer_id INTEGER,
                    sale_date DATE,
                    total_amount REAL DEFAULT 0,
                    total_liters REAL DEFAULT 0,
                    payment_status TEXT DEFAULT 'Pending',
                    notes TEXT,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL
                )
            """)

            # Sale items table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS sale_items (
                    item_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    sale_id INTEGER,
                    product_id INTEGER,
                    quantity INTEGER,
                    rate REAL,
                    amount REAL,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (sale_id) REFERENCES sales (sale_id) ON DELETE CASCADE,
                    FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
                )
            """)

            # Payments table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS payments (
                    payment_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    sale_id INTEGER,
                    payment_date DATE,
                    payment_method TEXT,
                    amount REAL,
                    rrn TEXT,
                    reference TEXT,
                    status TEXT DEFAULT 'Completed',
                    notes TEXT,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (sale_id) REFERENCES sales (sale_id) ON DELETE CASCADE
                )
            """)

            # Demos table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS demos (
                    demo_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    customer_id INTEGER,
                    distributor_id INTEGER,
                    demo_date DATE,
                    demo_time TIME,
                    product_id INTEGER,
                    quantity_provided INTEGER,
                    follow_up_date DATE,
                    conversion_status TEXT DEFAULT 'Not Converted',
                    notes TEXT,
                    demo_location TEXT,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    updated_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                    FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL,
                    FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
                )
            """)

            # WhatsApp logs table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS whatsapp_logs (
                    log_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    customer_id INTEGER,
                    distributor_id INTEGER,
                    message_type TEXT,
                    message_content TEXT,
                    status TEXT,
                    sent_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    response TEXT,
                    FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                    FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL
                )
            """)

            # Follow-ups table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS follow_ups (
                    follow_up_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    customer_id INTEGER,
                    distributor_id INTEGER,
                    demo_id INTEGER,
                    follow_up_date DATE,
                    follow_up_type TEXT,
                    notes TEXT,
                    status TEXT DEFAULT 'Pending',
                    next_follow_up_date DATE,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                    FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL,
                    FOREIGN KEY (demo_id) REFERENCES demos (demo_id) ON DELETE SET NULL
                )
            """)

            # System logs table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS system_logs (
                    log_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    log_type TEXT,
                    log_message TEXT,
                    table_name TEXT,
                    record_id INTEGER,
                    action TEXT,
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    user_info TEXT
                )
            """)

            # Rollback logs table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS rollback_logs (
                    rollback_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    table_name TEXT,
                    record_id INTEGER,
                    old_data TEXT,
                    new_data TEXT,
                    action TEXT,
                    rollback_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    rolled_back_by TEXT
                )
            """)

            # Offers table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS offers (
                    offer_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    offer_name TEXT NOT NULL,
                    offer_description TEXT,
                    product_id INTEGER,
                    discount_percentage REAL,
                    discount_amount REAL,
                    start_date DATE,
                    end_date DATE,
                    status TEXT DEFAULT 'Active',
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (product_id) REFERENCES products (product_id) ON DELETE SET NULL
                )
            """)

            # Demo teams table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS demo_teams (
                    team_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    team_name TEXT NOT NULL,
                    team_leader TEXT,
                    team_members TEXT,
                    assigned_villages TEXT,
                    status TEXT DEFAULT 'Active',
                    created_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                )
            """)

            conn.commit()
            logger.info("Database tables initialized successfully")

        except sqlite3.Error as e:
            logger.error(f"Error initializing database: {e}")
            raise
        finally:
            conn.close()

        # Post-schema steps run on fresh connections of their own.
        self.initialize_default_data()
        self.migrate_database()
        self.create_indexes()
+
|
| 260 |
+
def create_indexes(self):
    """Create indexes on frequently queried columns for better performance.

    Idempotent: every statement uses CREATE INDEX IF NOT EXISTS, so this is
    safe to call on every startup. Index failures are logged but not raised,
    so the application can still start without them.
    """
    conn = self.get_connection()

    try:
        # Create indexes for frequently queried columns
        indexes = [
            "CREATE INDEX IF NOT EXISTS idx_customers_village ON customers(village)",
            "CREATE INDEX IF NOT EXISTS idx_customers_mobile ON customers(mobile)",
            "CREATE INDEX IF NOT EXISTS idx_sales_customer_id ON sales(customer_id)",
            "CREATE INDEX IF NOT EXISTS idx_sales_date ON sales(sale_date)",
            "CREATE INDEX IF NOT EXISTS idx_sales_invoice ON sales(invoice_no)",
            "CREATE INDEX IF NOT EXISTS idx_payments_sale_id ON payments(sale_id)",
            "CREATE INDEX IF NOT EXISTS idx_demos_customer_id ON demos(customer_id)",
            "CREATE INDEX IF NOT EXISTS idx_demos_date ON demos(demo_date)",
            "CREATE INDEX IF NOT EXISTS idx_sale_items_sale_id ON sale_items(sale_id)",
            "CREATE INDEX IF NOT EXISTS idx_follow_ups_date ON follow_ups(follow_up_date)",
            "CREATE INDEX IF NOT EXISTS idx_whatsapp_customer_id ON whatsapp_logs(customer_id)",
        ]

        for index_sql in indexes:
            conn.execute(index_sql)

        conn.commit()
        logger.info("Database indexes created successfully")

    except sqlite3.Error as e:
        # Non-fatal: missing indexes only affect performance, not correctness.
        logger.error(f"Error creating indexes: {e}")
    finally:
        conn.close()
|
| 290 |
+
|
| 291 |
+
def migrate_database(self):
    """Migrate existing database to add missing columns.

    Inspects the live schema of the demos table via PRAGMA table_info and
    issues ALTER TABLE only for columns that do not exist yet, so re-running
    the migration on an up-to-date database is a no-op.
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()

        # Check which columns currently exist on demos
        # (PRAGMA row layout: cid, name, type, notnull, dflt_value, pk)
        cursor.execute("PRAGMA table_info(demos)")
        columns = [column[1] for column in cursor.fetchall()]

        # Add demo_time column if it doesn't exist
        if "demo_time" not in columns:
            cursor.execute("ALTER TABLE demos ADD COLUMN demo_time TIME")
            logger.info("Added demo_time column to demos table")

        # Add demo_location column if it doesn't exist
        if "demo_location" not in columns:
            cursor.execute("ALTER TABLE demos ADD COLUMN demo_location TEXT")
            logger.info("Added demo_location column to demos table")

        conn.commit()
        logger.info("Database migration completed successfully")

    except sqlite3.Error as e:
        logger.error(f"Error during database migration: {e}")
        conn.rollback()
    finally:
        conn.close()
|
| 319 |
+
|
| 320 |
+
def initialize_default_data(self):
    """Seed the database with default products and demo teams.

    Uses INSERT OR IGNORE so re-running against an existing database does
    not duplicate rows. Errors are logged and swallowed so seeding never
    blocks startup.
    """
    # (product_name, packing_type, capacity_ltr, category, standard_rate)
    default_products = [
        ("1 LTR PLASTIC JAR", "PLASTIC_JAR", 1.0, "Regular", 95),
        ("2 LTR PLASTIC JAR", "PLASTIC_JAR", 2.0, "Regular", 185),
        ("5 LTR PLASTIC JAR", "PLASTIC_JAR", 5.0, "Regular", 460),
        ("5 LTR STEEL BARNI", "STEEL_BARNI", 5.0, "Premium", 680),
        ("10 LTR STEEL BARNI", "STEEL_BARNI", 10.0, "Premium", 1300),
        ("20 LTR STEEL BARNI", "STEEL_BARNI", 20.0, "Premium", 2950),
        ("20 LTR PLASTIC CAN", "PLASTIC_CAN", 20.0, "Regular", 2400),
        ("1 LTR PET BOTTLE", "PET_BOTTLE", 1.0, "Regular", 85),
    ]

    # (team_name, team_leader, team_members, assigned_villages)
    default_teams = [
        (
            "Team A - North Region",
            "Rajesh Kumar",
            "Mohan, Suresh, Priya",
            "Amiyad, Amvad, Ankalav",
        ),
        (
            "Team B - South Region",
            "Sunil Patel",
            "Anita, Vijay, Deepak",
            "Petlad, Borsad, Vadodara",
        ),
    ]

    conn = self.get_connection()
    try:
        # Insert default products
        for product in default_products:
            conn.execute(
                """
                INSERT OR IGNORE INTO products (product_name, packing_type, capacity_ltr, category, standard_rate)
                VALUES (?, ?, ?, ?, ?)
                """,
                product,
            )

        # Insert default demo teams
        for team in default_teams:
            conn.execute(
                """
                INSERT OR IGNORE INTO demo_teams (team_name, team_leader, team_members, assigned_villages)
                VALUES (?, ?, ?, ?)
                """,
                team,
            )

        conn.commit()
        logger.info("Default data initialized successfully")

    except sqlite3.Error as e:
        logger.error(f"Error initializing default data: {e}")
    finally:
        conn.close()
|
| 377 |
+
|
| 378 |
+
def _execute_query_internal(self, query: str, params: tuple = None) -> List[tuple]:
    """Internal method to execute SQL query without logging.

    Used by execute_query and by the audit logger itself (which must not
    recurse through execute_query's logging path).

    Returns:
        SELECT  -> list of result rows.
        INSERT  -> [(lastrowid,)] so callers can retrieve the new key.
        other   -> [] (UPDATE/DELETE/DDL).

    Raises:
        sqlite3.Error after rolling back, so callers can decide how to react.
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()
        if params:
            cursor.execute(query, params)
        else:
            cursor.execute(query)

        # Only try to fetch results for SELECT queries
        if query.strip().upper().startswith("SELECT"):
            result = cursor.fetchall()
        elif query.strip().upper().startswith("INSERT"):
            # For INSERT queries, return the lastrowid as a single-row result
            result = [(cursor.lastrowid,)]
        else:
            result = []

        conn.commit()
        return result

    except sqlite3.Error as e:
        logger.error(f"Database query error: {e}")
        conn.rollback()
        raise
    finally:
        conn.close()
|
| 406 |
+
|
| 407 |
+
def execute_query(
    self, query: str, params: tuple = None, log_action: bool = True
) -> List[tuple]:
    """Execute a SQL query with comprehensive error handling.

    Args:
        query: SQL text (SELECT returns rows, INSERT returns [(lastrowid,)]).
        params: optional bound parameters.
        log_action: when True, also record the execution in system_logs.
            The self._is_logging flag guards against infinite recursion,
            because the audit insert itself runs through
            _execute_query_internal.

    Returns:
        Query results, or [] on any error — errors are logged, never raised
        to the caller.
    """
    try:
        result = self._execute_query_internal(query, params)

        # Log the query execution (but avoid recursion)
        if log_action and not self._is_logging:
            try:
                self._is_logging = True
                self._execute_query_internal(
                    """
                    INSERT INTO system_logs (log_type, log_message, table_name, record_id, action)
                    VALUES (?, ?, ?, ?, ?)
                    """,
                    (
                        "QUERY_EXECUTION",
                        f"Executed query: {query[:100]}...",  # truncated for log size
                        None,
                        None,
                        "EXECUTE",
                    ),
                )
            except Exception as e:
                # Audit failure must never break the original query's result.
                logger.error(f"Error logging system action: {e}")
            finally:
                self._is_logging = False

        return result
    except Exception as e:
        logger.error(f"Error in execute_query: {e}")
        return []  # Return empty list instead of raising exception
|
| 440 |
+
|
| 441 |
+
def get_dataframe(
    self, table_name: str = None, query: str = None, params: tuple = None
) -> pd.DataFrame:
    """Get table data as DataFrame with flexible query support.

    Args:
        table_name: when no query is given, all rows of this table are
            selected. (The name is interpolated directly into the SQL, so
            it must come from trusted, internal call sites only.)
        query: optional explicit SQL that takes precedence over table_name.
        params: bound parameters for the explicit query.

    Returns:
        A DataFrame with the results, or an empty DataFrame on any error
        (errors are logged, never raised).
    """
    conn = self.get_connection()
    try:
        if query:
            df = pd.read_sql_query(query, conn, params=params)
        else:
            df = pd.read_sql_query(f"SELECT * FROM {table_name}", conn)
        return df
    except Exception as e:
        logger.error(
            f"Error getting DataFrame for {table_name if table_name else 'query'}: {e}"
        )
        # Return empty DataFrame with proper structure
        return pd.DataFrame()
    finally:
        conn.close()
|
| 460 |
+
|
| 461 |
+
def add_customer(
    self,
    name: str,
    mobile: str = "",
    village: str = "",
    taluka: str = "",
    district: str = "",
    customer_code: str = None,
) -> int:
    """Add a new customer with duplicate handling.

    Duplicate detection: a customer is considered existing when the mobile
    matches, or when name and village both match; in that case the existing
    customer_id is returned and nothing is inserted.

    Args:
        customer_code: optional external code; when omitted, a timestamp +
            random-suffix code is generated. On a UNIQUE collision the code
            is regenerated up to 5 times before giving up.

    Returns:
        The customer_id (new or existing), or -1 on failure — callers must
        check for the sentinel instead of catching exceptions.
    """
    # Generate customer code if not provided
    if not customer_code:
        customer_code = f"CUST{datetime.now().strftime('%Y%m%d%H%M%S')}{random.randint(100, 999)}"

    try:
        # Check if customer already exists (by mobile or similar name+village)
        existing_customer = self.execute_query(
            "SELECT customer_id FROM customers WHERE mobile = ? OR (name = ? AND village = ?)",
            (mobile, name, village),
            log_action=False,
        )

        if existing_customer:
            # Customer already exists, return existing ID
            return existing_customer[0][0]

        # If customer_code already exists, generate a new one
        max_attempts = 5
        for attempt in range(max_attempts):
            try:
                result = self.execute_query(
                    """
                    INSERT INTO customers (customer_code, name, mobile, village, taluka, district)
                    VALUES (?, ?, ?, ?, ?, ?)
                    """,
                    (customer_code, name, mobile, village, taluka, district),
                    log_action=False,
                )
                break
            except sqlite3.IntegrityError as e:
                if (
                    "UNIQUE constraint failed: customers.customer_code" in str(e)
                    and attempt < max_attempts - 1
                ):
                    # Generate new unique customer code (wider random range
                    # than the first attempt to reduce collision odds)
                    customer_code = f"CUST{datetime.now().strftime('%Y%m%d%H%M%S')}{random.randint(1000, 9999)}"
                    continue
                else:
                    raise e

        # Get the inserted customer_id
        customer_id = self.execute_query(
            "SELECT last_insert_rowid()", log_action=False
        )[0][0]

        self.log_system_action(
            "CUSTOMER_ADD",
            f"Added customer: {name}",
            "customers",
            customer_id,
            "INSERT",
        )

        return customer_id
    except Exception as e:
        logger.error(f"Error adding customer: {e}")
        # Return a fallback - this won't be in database but prevents crashes
        return -1
|
| 530 |
+
|
| 531 |
+
def add_distributor(
    self,
    name: str,
    village: str = "",
    taluka: str = "",
    district: str = "",
    mantri_name: str = "",
    mantri_mobile: str = "",
    sabhasad_count: int = 0,
    contact_in_group: int = 0,
    status: str = "Active",
) -> int:
    """Add a new distributor with duplicate handling.

    A distributor is considered a duplicate when name, village and taluka
    all match an existing row; the existing distributor_id is then returned
    without inserting.

    Returns:
        The distributor_id (new or existing), or -1 on failure — callers
        must check for the sentinel instead of catching exceptions.
    """
    try:
        # Check if distributor already exists
        existing_distributor = self.execute_query(
            "SELECT distributor_id FROM distributors WHERE name = ? AND village = ? AND taluka = ?",
            (name, village, taluka),
            log_action=False,
        )

        if existing_distributor:
            # Distributor already exists, return existing ID
            return existing_distributor[0][0]

        # Insert new distributor
        self.execute_query(
            """
            INSERT INTO distributors (name, village, taluka, district, mantri_name, mantri_mobile,
                                      sabhasad_count, contact_in_group, status)
            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)
            """,
            (
                name,
                village,
                taluka,
                district,
                mantri_name,
                mantri_mobile,
                sabhasad_count,
                contact_in_group,
                status,
            ),
            log_action=False,
        )

        # Get the inserted distributor_id
        distributor_id = self.execute_query(
            "SELECT last_insert_rowid()", log_action=False
        )[0][0]

        self.log_system_action(
            "DISTRIBUTOR_ADD",
            f"Added distributor: {name}",
            "distributors",
            distributor_id,
            "INSERT",
        )

        return distributor_id

    except Exception as e:
        logger.error(f"Error adding distributor: {e}")
        return -1
|
| 596 |
+
|
| 597 |
+
def get_distributor_by_location(self, village: str, taluka: str) -> Optional[Dict]:
    """Look up the distributor serving a given village/taluka pair.

    Returns:
        The first matching distributors row converted to a dict, or None
        when there is no match or the lookup fails (failures are logged).
    """
    lookup_sql = "SELECT * FROM distributors WHERE village = ? AND taluka = ?"
    try:
        rows = self.execute_query(
            lookup_sql,
            (village, taluka),
            log_action=False,
        )
        return dict(rows[0]) if rows else None
    except Exception as e:
        logger.error(f"Error getting distributor by location: {e}")
        return None
|
| 611 |
+
|
| 612 |
+
def distributor_exists(self, name: str, village: str, taluka: str) -> bool:
    """Return True when a distributor with this exact name/village/taluka exists.

    Any query failure is logged and treated as "does not exist".
    """
    try:
        matches = self.execute_query(
            "SELECT distributor_id FROM distributors WHERE name = ? AND village = ? AND taluka = ?",
            (name, village, taluka),
            log_action=False,
        )
    except Exception as e:
        logger.error(f"Error checking distributor existence: {e}")
        return False
    return bool(matches)
|
| 624 |
+
|
| 625 |
+
# In your DatabaseManager class in database.py, replace the generate_invoice_number method:
|
| 626 |
+
|
| 627 |
+
def generate_invoice_number(self):
    """Generate automatic invoice number in format: INVCLmmyyserial.

    The serial resets each month-year and is zero-padded to 3 digits,
    e.g. INVCL0725001 for the first invoice of July 2025.

    NOTE(review): a second generate_invoice_number(prefix=...) defined later
    in this class shadows this one at class-creation time — one of the two
    should be removed.

    Returns:
        The next invoice number, or a timestamp-based fallback on error.
    """
    try:
        # Get current date components
        now = datetime.now()
        month = now.strftime("%m")  # Two-digit month
        year = now.strftime("%y")  # Two-digit year

        # Get the last invoice number for this month-year
        result = self.execute_query(
            "SELECT invoice_no FROM sales WHERE invoice_no LIKE ? ORDER BY sale_id DESC LIMIT 1",
            (f"INVCL{month}{year}%",),
            log_action=False,
        )

        if result:
            last_invoice = result[0][0]
            # Extract serial number and increment
            try:
                # Format: INVCL(5 chars) + mm(2) + yy(2) = 9 chars of prefix.
                # BUGFIX: was last_invoice[8:], which pulled the last year
                # digit into the serial and produced wrong increments
                # (e.g. INVCL0725001 -> serial parsed as 5001, not 1).
                serial_part = last_invoice[9:]
                last_serial = int(serial_part)
                new_serial = last_serial + 1
            except ValueError:
                # Malformed serial — restart numbering for this period.
                new_serial = 1
        else:
            # First invoice of the month-year
            new_serial = 1

        # Format: INVCL + month(2) + year(2) + serial(3 digits)
        return f"INVCL{month}{year}{new_serial:03d}"

    except Exception as e:
        logger.error(f"Error generating invoice number: {e}")
        # Fallback: timestamp-based
        return f"INVCL{int(datetime.now().timestamp())}"
|
| 663 |
+
|
| 664 |
+
# Or if you want a more flexible version with configurable prefix:
|
| 665 |
+
def generate_invoice_number(self, prefix="INVCL"):
    """Generate automatic invoice number in format: PREFIXmmyyserial.

    Generalized variant of the method above with a configurable prefix.
    NOTE(review): this definition shadows the earlier zero-argument
    generate_invoice_number; only this one is bound on the class.

    Args:
        prefix: invoice-number prefix; the serial is parsed starting at
            len(prefix) + 4 (mm + yy) characters.

    Returns:
        The next invoice number for the current month-year, or a
        timestamp-based fallback on error.
    """
    try:
        now = datetime.now()
        month = now.strftime("%m")
        year = now.strftime("%y")

        # Latest invoice for this prefix and month-year, if any
        result = self.execute_query(
            "SELECT invoice_no FROM sales WHERE invoice_no LIKE ? ORDER BY sale_id DESC LIMIT 1",
            (f"{prefix}{month}{year}%",),
            log_action=False,
        )

        if result:
            last_invoice = result[0][0]
            try:
                # Remove prefix and date part, get serial
                serial_part = last_invoice[
                    len(prefix) + 4 :
                ]  # prefix + 4 digits (mmyy)
                last_serial = int(serial_part)
                new_serial = last_serial + 1
            except ValueError:
                # Malformed serial — restart numbering for this period.
                new_serial = 1
        else:
            new_serial = 1

        return f"{prefix}{month}{year}{new_serial:03d}"

    except Exception as e:
        logger.error(f"Error generating invoice number: {e}")
        return f"{prefix}{int(datetime.now().timestamp())}"
|
| 697 |
+
|
| 698 |
+
# Add to your DatabaseManager class in database.py
|
| 699 |
+
|
| 700 |
+
def add_sale(
    self,
    invoice_no: str,
    customer_id: int,
    sale_date,
    items: List[Dict],
    payments: List[Dict] = None,
    notes: str = "",
) -> int:
    """Add a new sale with items and optional payments - ENHANCED.

    Totals are computed from the items rather than trusted from the caller.
    All inserts happen on one connection and are committed together; on any
    error the transaction is rolled back and the exception re-raised.

    Args:
        invoice_no: pre-generated invoice number (see generate_invoice_number).
        customer_id: id of an existing customers row.
        sale_date: sale date, passed to SQLite unchanged.
        items: dicts with keys product_id, quantity, rate and optional liters.
        payments: optional dicts with keys payment_date, method, amount and
            optional rrn / reference.
        notes: free-form note stored on the sale.

    Returns:
        The new sale_id.
    """
    conn = self.get_connection()
    try:
        cursor = conn.cursor()

        # Calculate total amount and liters
        total_amount = sum(item["quantity"] * item["rate"] for item in items)
        total_liters = sum(item.get("liters", 0) for item in items)

        # Was a print() debug statement; logger.debug keeps the trace
        # available without polluting stdout in production.
        logger.debug(
            "Creating sale - Invoice: %s, Customer: %s, Total: %s",
            invoice_no,
            customer_id,
            total_amount,
        )

        # Add sale record
        cursor.execute(
            """
            INSERT INTO sales (invoice_no, customer_id, sale_date, total_amount, total_liters, notes)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (invoice_no, customer_id, sale_date, total_amount, total_liters, notes),
        )

        # Get the sale ID
        sale_id = cursor.lastrowid
        logger.debug("Sale created with ID: %s", sale_id)

        # Add sale items
        for item in items:
            amount = item["quantity"] * item["rate"]
            logger.debug(
                "Adding item - Product: %s, Qty: %s, Rate: %s",
                item["product_id"],
                item["quantity"],
                item["rate"],
            )

            cursor.execute(
                """
                INSERT INTO sale_items (sale_id, product_id, quantity, rate, amount)
                VALUES (?, ?, ?, ?, ?)
                """,
                (
                    sale_id,
                    item["product_id"],
                    item["quantity"],
                    item["rate"],
                    amount,
                ),
            )

        # Add payments if provided
        if payments:
            for payment in payments:
                cursor.execute(
                    """
                    INSERT INTO payments (sale_id, payment_date, payment_method, amount, rrn, reference)
                    VALUES (?, ?, ?, ?, ?, ?)
                    """,
                    (
                        sale_id,
                        payment["payment_date"],
                        payment["method"],
                        payment["amount"],
                        payment.get("rrn", ""),
                        payment.get("reference", ""),
                    ),
                )

        conn.commit()

        # Recompute Paid/Partial/Pending from the payments just inserted
        self._update_payment_status(sale_id)

        logger.debug("Sale %s completed successfully", sale_id)
        return sale_id

    except Exception as e:
        conn.rollback()
        logger.error(f"Error adding sale: {e}")
        raise
    finally:
        conn.close()
|
| 789 |
+
|
| 790 |
+
def _update_payment_status(self, sale_id: int):
    """Update payment status for a sale.

    Recomputes the sale's status from the sum of its recorded payments:
    'Paid' when payments cover the total, 'Partial' when something but not
    everything is paid, 'Pending' when nothing is paid. Errors are logged
    and swallowed so a status glitch never fails the calling transaction.
    """
    conn = self.get_connection()
    try:
        # Get total paid amount (0 when there are no payment rows)
        cursor = conn.cursor()
        cursor.execute(
            "SELECT COALESCE(SUM(amount), 0) FROM payments WHERE sale_id = ?",
            (sale_id,),
        )
        total_paid = cursor.fetchone()[0]

        # Get sale total
        cursor.execute(
            "SELECT total_amount FROM sales WHERE sale_id = ?", (sale_id,)
        )
        sale_total = cursor.fetchone()[0]

        # Determine payment status
        if total_paid >= sale_total:
            status = "Paid"
        elif total_paid > 0:
            status = "Partial"
        else:
            status = "Pending"

        # Update status
        cursor.execute(
            "UPDATE sales SET payment_status = ? WHERE sale_id = ?",
            (status, sale_id),
        )
        conn.commit()

    except Exception as e:
        logger.error(f"Error updating payment status: {e}")
    finally:
        conn.close()
|
| 827 |
+
|
| 828 |
+
def get_pending_payments(self) -> pd.DataFrame:
    """Get all pending payments with customer details.

    pending_amount is total_amount minus the sum of recorded payments;
    the HAVING clause filters out sales that are already fully settled
    even if their status column is stale.
    """
    return self.get_dataframe(
        "sales",
        """
        SELECT s.sale_id, s.invoice_no, s.sale_date, c.name as customer_name,
               c.mobile, c.village, s.total_amount,
               (s.total_amount - COALESCE(SUM(p.amount), 0)) as pending_amount,
               COALESCE(SUM(p.amount), 0) as paid_amount
        FROM sales s
        LEFT JOIN customers c ON s.customer_id = c.customer_id
        LEFT JOIN payments p ON s.sale_id = p.sale_id
        WHERE s.payment_status IN ('Pending', 'Partial')
        GROUP BY s.sale_id
        HAVING pending_amount > 0
        ORDER BY s.sale_date DESC
        """,
    )
|
| 846 |
+
|
| 847 |
+
def get_demo_conversions(self) -> pd.DataFrame:
    """Get demo conversion statistics with details.

    Returns every demo joined with its customer, product and distributor,
    plus a 0/1 `converted` flag derived from conversion_status, newest
    demos first.
    """
    return self.get_dataframe(
        "demos",
        """
        SELECT d.*, c.name as customer_name, p.product_name,
               dist.name as distributor_name, c.village, c.taluka,
               CASE WHEN d.conversion_status = 'Converted' THEN 1 ELSE 0 END as converted
        FROM demos d
        LEFT JOIN customers c ON d.customer_id = c.customer_id
        LEFT JOIN products p ON d.product_id = p.product_id
        LEFT JOIN distributors dist ON d.distributor_id = dist.distributor_id
        ORDER BY d.demo_date DESC
        """,
    )
|
| 862 |
+
|
| 863 |
+
def get_sales_analytics(self, start_date: str = None, end_date: str = None) -> Dict:
    """Get comprehensive sales analytics.

    Args:
        start_date: inclusive range start as YYYY-MM-DD; defaults to 30
            days ago.
        end_date: inclusive range end as YYYY-MM-DD; defaults to today.

    Returns:
        Dict with total_sales, total_revenue, avg_sale_value,
        unique_customers, completed_payments and pending_payments (NULL
        aggregates coerced to 0), or {} when the query returns nothing.
    """
    if not start_date:
        start_date = (datetime.now() - timedelta(days=30)).strftime("%Y-%m-%d")
    if not end_date:
        end_date = datetime.now().strftime("%Y-%m-%d")

    query = """
        SELECT
            COUNT(*) as total_sales,
            SUM(total_amount) as total_revenue,
            AVG(total_amount) as avg_sale_value,
            COUNT(DISTINCT customer_id) as unique_customers,
            SUM(CASE WHEN payment_status = 'Paid' THEN 1 ELSE 0 END) as completed_payments,
            SUM(CASE WHEN payment_status IN ('Pending', 'Partial') THEN 1 ELSE 0 END) as pending_payments
        FROM sales
        WHERE sale_date BETWEEN ? AND ?
    """

    result = self.execute_query(query, (start_date, end_date), log_action=False)

    if result:
        row = result[0]
        # SUM/AVG over zero rows yield NULL -> None; substitute zeros.
        return {
            "total_sales": row[0] or 0,
            "total_revenue": row[1] or 0,
            "avg_sale_value": row[2] or 0,
            "unique_customers": row[3] or 0,
            "completed_payments": row[4] or 0,
            "pending_payments": row[5] or 0,
        }
    return {}
|
| 895 |
+
|
| 896 |
+
def log_system_action(
    self,
    log_type: str,
    message: str,
    table_name: str = None,
    record_id: int = None,
    action: str = None,
):
    """Log system actions for audit trail - without recursion.

    Writes one row to system_logs through _execute_query_internal. The
    self._is_logging flag prevents re-entrant logging: the audit insert
    itself must not trigger another audit entry. Failures are logged and
    swallowed so auditing never breaks the operation being audited.
    """
    if self._is_logging:
        return  # Prevent recursion

    try:
        self._is_logging = True
        self._execute_query_internal(
            """
            INSERT INTO system_logs (log_type, log_message, table_name, record_id, action)
            VALUES (?, ?, ?, ?, ?)
            """,
            (log_type, message, table_name, record_id, action),
        )
    except Exception as e:
        logger.error(f"Error logging system action: {e}")
    finally:
        self._is_logging = False
|
| 921 |
+
|
| 922 |
+
def create_rollback_point(
    self, table_name: str, record_id: int, old_data: str, new_data: str, action: str
):
    """Create a rollback point for data changes.

    Stores before/after snapshots (serialized by the caller — presumably
    JSON, TODO confirm) in rollback_logs so a change can later be undone.
    Errors are logged and swallowed.
    """
    try:
        self.execute_query(
            """
            INSERT INTO rollback_logs (table_name, record_id, old_data, new_data, action)
            VALUES (?, ?, ?, ?, ?)
            """,
            (table_name, record_id, old_data, new_data, action),
            log_action=False,
        )
    except Exception as e:
        logger.error(f"Error creating rollback point: {e}")
|
| 937 |
+
|
| 938 |
+
def get_recent_activity(self, limit: int = 10) -> pd.DataFrame:
    """Get recent system activity, newest entries first.

    Args:
        limit: maximum number of system_logs rows to return.

    Returns:
        DataFrame of log rows (empty on error, per get_dataframe).
    """
    # BUGFIX/hardening: the limit used to be interpolated into the SQL via
    # an f-string; binding it as a parameter keeps the statement static and
    # rules out malformed SQL if a non-int value ever reaches this method.
    return self.get_dataframe(
        "system_logs",
        """
        SELECT log_type, log_message, table_name, record_id, action, created_date
        FROM system_logs
        ORDER BY created_date DESC
        LIMIT ?
        """,
        params=(limit,),
    )
|
| 949 |
+
|
| 950 |
+
def backup_database(self, backup_path: str = None):
    """Create a database backup using SQLite's online backup API.

    Args:
        backup_path: destination file; when falsy, a timestamped
            backup_YYYYmmdd_HHMMSS.db name is generated.

    Returns:
        The backup file path on success, None on failure (logged).
    """
    if not backup_path:
        stamp = datetime.now().strftime('%Y%m%d_%H%M%S')
        backup_path = f"backup_{stamp}.db"

    try:
        source = self.get_connection()
        target = sqlite3.connect(backup_path)

        # Connection.backup copies the live database page-by-page even
        # while other connections are using it.
        with target:
            source.backup(target)

        source.close()
        target.close()

        logger.info(f"Database backup created: {backup_path}")
        return backup_path

    except Exception as e:
        logger.error(f"Error creating database backup: {e}")
        return None
|
| 971 |
+
|
| 972 |
+
def get_village_wise_sales(self) -> pd.DataFrame:
    """Get sales data grouped by village.

    Rows with a missing or empty village are excluded; villages are
    ordered by total revenue, highest first.
    """
    return self.get_dataframe(
        "sales",
        """
        SELECT c.village, COUNT(s.sale_id) as total_sales,
               SUM(s.total_amount) as total_revenue,
               AVG(s.total_amount) as avg_sale_value,
               COUNT(DISTINCT s.customer_id) as unique_customers
        FROM sales s
        JOIN customers c ON s.customer_id = c.customer_id
        WHERE c.village IS NOT NULL AND c.village != ''
        GROUP BY c.village
        ORDER BY total_revenue DESC
        """,
    )
|
| 988 |
+
|
| 989 |
+
def get_product_performance(self) -> pd.DataFrame:
    """Get product performance analytics.

    Aggregates sale_items per product: number of line items sold, total
    quantity, total revenue and average rate, ordered by revenue.
    """
    return self.get_dataframe(
        "sale_items",
        """
        SELECT p.product_name, COUNT(si.item_id) as times_sold,
               SUM(si.quantity) as total_quantity,
               SUM(si.amount) as total_revenue,
               AVG(si.rate) as avg_rate
        FROM sale_items si
        JOIN products p ON si.product_id = p.product_id
        GROUP BY p.product_id, p.product_name
        ORDER BY total_revenue DESC
        """,
    )
|
| 1004 |
+
|
| 1005 |
+
def get_upcoming_follow_ups(self) -> pd.DataFrame:
    """Get upcoming follow-ups.

    Pending follow-ups dated today or later, joined with customer,
    distributor and originating demo details, soonest first, capped at 20.
    """
    return self.get_dataframe(
        "follow_ups",
        """
        SELECT f.*, c.name as customer_name, c.mobile,
               d.name as distributor_name, dm.demo_date
        FROM follow_ups f
        LEFT JOIN customers c ON f.customer_id = c.customer_id
        LEFT JOIN distributors d ON f.distributor_id = d.distributor_id
        LEFT JOIN demos dm ON f.demo_id = dm.demo_id
        WHERE f.follow_up_date >= date('now')
        AND f.status = 'Pending'
        ORDER BY f.follow_up_date ASC
        LIMIT 20
        """,
    )
|
| 1022 |
+
|
| 1023 |
+
def get_whatsapp_logs(self, customer_id: int = None) -> pd.DataFrame:
    """Get WhatsApp communication logs.

    Args:
        customer_id: when given, return all logs for that customer;
            otherwise return the 50 most recent logs across all customers.

    Returns:
        DataFrame of whatsapp_logs rows joined with customer name/mobile,
        newest first.
    """
    if customer_id:
        return self.get_dataframe(
            "whatsapp_logs",
            """
            SELECT w.*, c.name as customer_name, c.mobile
            FROM whatsapp_logs w
            LEFT JOIN customers c ON w.customer_id = c.customer_id
            WHERE w.customer_id = ?
            ORDER BY w.sent_date DESC
            """,
            (customer_id,),
        )
    else:
        return self.get_dataframe(
            "whatsapp_logs",
            """
            SELECT w.*, c.name as customer_name, c.mobile
            FROM whatsapp_logs w
            LEFT JOIN customers c ON w.customer_id = c.customer_id
            ORDER BY w.sent_date DESC
            LIMIT 50
            """,
        )
|
| 1048 |
+
|
| 1049 |
+
def cleanup_old_data(self, days: int = 365):
    """Delete audit and rollback log rows older than the given age.

    Args:
        days: retention window; rows dated before now - days are removed
            from system_logs and rollback_logs. Errors are logged, not
            raised.
    """
    try:
        cutoff_date = (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")

        cleanup_statements = (
            # Clean system logs
            "DELETE FROM system_logs WHERE created_date < ?",
            # Clean rollback logs
            "DELETE FROM rollback_logs WHERE rollback_date < ?",
        )
        for statement in cleanup_statements:
            self.execute_query(statement, (cutoff_date,), log_action=False)

        logger.info(f"Cleaned up data older than {days} days")

    except Exception as e:
        logger.error(f"Error cleaning up old data: {e}")
|
| 1072 |
+
|
| 1073 |
+
|
| 1074 |
+
# Utility function to check database health
|
| 1075 |
+
def check_database_health(db_path: str = "sales_management.db") -> Dict:
    """Check database health and statistics.

    Opens the database via DatabaseManager, counts rows in the core tables
    and reports the file size.

    Returns:
        On success: {"status": "healthy", "table_counts": {...},
        "database_size_mb": float, "last_backup": "N/A",
        "integrity_check": "passed"}. Note the last two fields are
        placeholders, not real checks. On failure: status "error" with the
        exception text and zeroed stats.
    """
    try:
        db = DatabaseManager(db_path)

        # Get table counts
        tables = ["customers", "sales", "distributors", "demos", "payments", "products"]
        counts = {}

        for table in tables:
            result = db.execute_query(f"SELECT COUNT(*) FROM {table}", log_action=False)
            counts[table] = result[0][0] if result else 0

        # Get database size
        db_size = os.path.getsize(db_path) if os.path.exists(db_path) else 0

        return {
            "status": "healthy",
            "table_counts": counts,
            "database_size_mb": round(db_size / (1024 * 1024), 2),
            "last_backup": "N/A",  # You can implement backup tracking
            "integrity_check": "passed",  # You can add actual integrity checks
        }

    except Exception as e:
        return {
            "status": "error",
            "error": str(e),
            "table_counts": {},
            "database_size_mb": 0,
            "integrity_check": "failed",
        }
|
main.py
ADDED
|
@@ -0,0 +1,230 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import streamlit as st
|
| 2 |
+
import os
|
| 3 |
+
import sys
|
| 4 |
+
|
| 5 |
+
# Add the current directory to Python path
|
| 6 |
+
sys.path.append(os.path.dirname(__file__))
|
| 7 |
+
|
| 8 |
+
# MUST BE FIRST - Page configuration
|
| 9 |
+
st.set_page_config(
|
| 10 |
+
page_title="Sales Management System",
|
| 11 |
+
page_icon="📊",
|
| 12 |
+
layout="wide",
|
| 13 |
+
initial_sidebar_state="expanded"
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
# Import utilities - Create these if they don't exist
try:
    from utils.styling import apply_custom_css
    from utils.helpers import init_session_state, check_module_availability
    from components.database_status import show_database_status
except ImportError:
    # Fallback implementations used when the utils/components packages are
    # missing, so the app still boots with basic behaviour.

    def apply_custom_css():
        """Inject the minimal CSS used by the page headers."""
        st.markdown("""
        <style>
        .main-header { color: #1f77b4; }
        .section-header { color: #2e86ab; margin-top: 2rem; }
        </style>
        """, unsafe_allow_html=True)

    def init_session_state():
        """Ensure every session-state slot the app reads exists (default None)."""
        for key in ('db', 'data_processor', 'analytics', 'whatsapp_manager'):
            if key not in st.session_state:
                st.session_state[key] = None

    def check_module_availability():
        """Probe optional dependencies.

        Returns:
            Tuple of (modules_available, whatsapp_available) booleans.
        """
        try:
            import pandas, plotly, sqlite3
            MODULES_AVAILABLE = True
        except ImportError:
            MODULES_AVAILABLE = False

        try:
            import pywhatkit
            WHATSAPP_AVAILABLE = True
        except ImportError:
            WHATSAPP_AVAILABLE = False

        return MODULES_AVAILABLE, WHATSAPP_AVAILABLE

    def show_database_status(db):
        """Render a sidebar badge reflecting database connectivity."""
        if db:
            try:
                db.execute_query("SELECT COUNT(*) FROM sqlite_master", log_action=False)
                st.sidebar.success("✅ Database Connected")
            # was a bare `except:` which would also swallow SystemExit /
            # KeyboardInterrupt; narrowed to Exception
            except Exception:
                st.sidebar.error("❌ Database Error")
        else:
            st.sidebar.warning("⚠️ Database Not Initialized")
|
| 65 |
+
|
| 66 |
+
# Apply custom CSS (real implementation or the inline fallback defined above)
apply_custom_css()

# Initialize session state (creates the db/data_processor/analytics/whatsapp slots)
init_session_state()

# Check module availability (pandas/plotly/sqlite3 and optional pywhatkit)
MODULES_AVAILABLE, WHATSAPP_AVAILABLE = check_module_availability()
|
| 74 |
+
|
| 75 |
+
# Initialize components with error handling
if MODULES_AVAILABLE:
    try:
        from database import DatabaseManager
        from data_processor import DataProcessor
        from analytics import Analytics

        # Lazily create singletons in session state so Streamlit reruns reuse them.
        if st.session_state.db is None:
            st.session_state.db = DatabaseManager()
            st.success("✅ Database initialized successfully!")

        if st.session_state.data_processor is None:
            st.session_state.data_processor = DataProcessor(st.session_state.db)

        if st.session_state.analytics is None:
            st.session_state.analytics = Analytics(st.session_state.db)

        # WhatsApp integration is optional; a failure degrades gracefully to None.
        if WHATSAPP_AVAILABLE and st.session_state.whatsapp_manager is None:
            try:
                from whatsapp_manager import WhatsAppManager
                st.session_state.whatsapp_manager = WhatsAppManager(st.session_state.db)
                st.success("✅ WhatsApp Manager initialized!")
            except Exception as e:
                st.warning(f"⚠️ WhatsApp Manager not available: {e}")
                st.session_state.whatsapp_manager = None

    except Exception as e:
        st.error(f"❌ Application initialization failed: {e}")
        st.info("Please check that all required files are in the correct location.")
|
| 104 |
+
|
| 105 |
+
# Assign to local variables for easier access
# NOTE: these may all be None when MODULES_AVAILABLE is False; the page
# functions below are expected to handle that.
db = st.session_state.db
data_processor = st.session_state.data_processor
analytics = st.session_state.analytics
whatsapp_manager = st.session_state.whatsapp_manager
|
| 110 |
+
|
| 111 |
+
# Add this in your main content area (before page routing)
|
| 112 |
+
st.sidebar.markdown("""
|
| 113 |
+
<div style='text-align: center; margin: 20px 0; margin-top:-10px'>
|
| 114 |
+
<img src='https://tse4.mm.bing.net/th/id/OIP.bvMgrnyDHrBdq_MmZeP8XgHaHa?rs=1&pid=ImgDetMain&o=7&rm=3'
|
| 115 |
+
style='width: 200px; height: auto; margin-bottom: 10px;'>
|
| 116 |
+
</div>
|
| 117 |
+
""", unsafe_allow_html=True)
|
| 118 |
+
st.sidebar.markdown("<h2 style='text-align: center;'>🚀 Sales Management</h2>", unsafe_allow_html=True)
|
| 119 |
+
|
| 120 |
+
page = st.sidebar.radio("Navigation", [
|
| 121 |
+
"📊 System Dashboard", "👥 Customers", "💰 Sales", "💳 Payments",
|
| 122 |
+
"🎯 Demos", "🤝 Distributors", "🔍 File Viewer", "📤 Data Import", "📊 Power BI Dashboard", "📈 Reports"
|
| 123 |
+
], index=0)
|
| 124 |
+
|
| 125 |
+
# app.py (add this)
from sales_data_processor import SalesDataProcessor

# Initialize in your main app
# NOTE(review): init_session_state() above already creates 'data_processor'
# (and the MODULES_AVAILABLE branch assigns a DataProcessor to it), so this
# guard is likely never true and SalesDataProcessor is never constructed —
# confirm which processor is actually intended before relying on this.
if 'data_processor' not in st.session_state:
    st.session_state.data_processor = SalesDataProcessor(db)
|
| 131 |
+
|
| 132 |
+
def show_basic_dashboard(db, analytics):
    """Minimal fallback dashboard: four headline metrics from the sales summary."""
    st.title("📊 Sales Dashboard")

    if not (db and analytics):
        st.warning("Database or analytics not available")
        return

    try:
        summary = analytics.get_sales_summary()
        metrics = [
            ("Total Sales", f"₹{summary.get('total_sales', 0):,.0f}"),
            ("Pending Payments", f"₹{summary.get('pending_amount', 0):,.0f}"),
            ("Total Transactions", summary.get('total_transactions', 0)),
            ("Avg Sale", f"₹{summary.get('avg_sale_value', 0):,.0f}"),
        ]
        for column, (label, value) in zip(st.columns(4), metrics):
            with column:
                st.metric(label, value)
    except Exception as e:
        st.error(f"Error loading dashboard data: {e}")
|
| 153 |
+
# Page routing with error handling.
# Every page import is guarded with ImportError so a missing module shows a
# friendly message; previously Demos and Payments lacked the guard and fell
# through to the generic handler below.
try:
    if page == "📊 System Dashboard":
        try:
            from pages.system_dashboard import create_dashboard
            create_dashboard(db, analytics)
        except ImportError:
            st.error("Dashboard page not available. Creating basic dashboard...")
            show_basic_dashboard(db, analytics)

    elif page == "👥 Customers":
        try:
            from pages.customers import show_customers_page
            show_customers_page(db, whatsapp_manager)
        except ImportError:
            st.error("Customers page not available")

    elif page == "💰 Sales":
        try:
            from pages.sales import show_sales_page
            show_sales_page(db, whatsapp_manager)
        except ImportError:
            st.error("Sales page not available")

    elif page == "🎯 Demos":
        try:
            from pages.demos import show_demos_page
            show_demos_page(db, whatsapp_manager)
        except ImportError:
            st.error("Demos page not available")

    elif page == "💳 Payments":
        try:
            from pages.payments import show_payments_page
            show_payments_page(db, whatsapp_manager)
        except ImportError:
            st.error("Payments page not available")

    elif page == "🤝 Distributors":
        try:
            from pages.distributors import show_distributors_page
            show_distributors_page(db, whatsapp_manager)
        except ImportError:
            st.error("Distributors page not available")

    elif page == "🔍 File Viewer":
        try:
            from pages.file_viewer import show_file_viewer_page
            show_file_viewer_page(db, data_processor)
        except ImportError:
            st.error("File Viewer page not available")

    elif page == "📤 Data Import":
        try:
            from pages.data_import import show_data_import_page
            show_data_import_page(db, data_processor)
        except ImportError:
            st.error("Data Import page not available")

    elif page == "📊 Power BI Dashboard":
        try:
            from pages.dashboard import create_dashboard
            create_dashboard(db, analytics)
        except ImportError:
            st.error("Dashboard page not available. Creating basic dashboard...")
            show_basic_dashboard(db, analytics)

    elif page == "📈 Reports":
        try:
            from pages.reports import show_reports_page
            show_reports_page(db, whatsapp_manager)
        except ImportError:
            st.error("Reports page not available")

except Exception as e:
    st.error(f"Application error: {e}")
    st.info("Please check the console for more details.")
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
# Sidebar footer
st.sidebar.markdown("---")
st.sidebar.info("🚀 Sales Management System v2.0")

# Basic dashboard fallback (show_basic_dashboard is defined earlier in this file)
|
requirements.txt
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
streamlit==1.28.0
|
| 2 |
+
pandas==2.1.0
|
| 3 |
+
plotly==5.15.0
|
| 4 |
+
openpyxl==3.1.2
|
| 5 |
+
pywhatkit==5.4
|
| 6 |
+
schedule==1.2.0
|
| 7 |
+
deep-translator==1.11.4
|
| 8 |
+
numpy==1.24.0
|
sales_data_processor.py
ADDED
|
@@ -0,0 +1,502 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# sales_data_processor.py
|
| 2 |
+
import pandas as pd
|
| 3 |
+
import re
|
| 4 |
+
from datetime import datetime
|
| 5 |
+
import numpy as np
|
| 6 |
+
import sqlite3
|
| 7 |
+
import os
|
| 8 |
+
import streamlit as st
|
| 9 |
+
|
| 10 |
+
class SalesDataProcessor:
|
| 11 |
+
    def __init__(self, db):
        """Bind the database manager and prepare lookup tables + schema.

        Args:
            db: Object exposing ``get_connection()`` returning a sqlite3
                connection (e.g. the project's DatabaseManager).
        """
        self.db = db
        self.setup_product_mapping()    # packaging label -> product code
        self.setup_location_mapping()   # Gujarati locality -> English name
        self.setup_database_tables()    # idempotent CREATE TABLE IF NOT EXISTS
|
| 16 |
+
|
| 17 |
+
    def setup_database_tables(self):
        """Initialize database tables if they don't exist.

        Safe to call repeatedly (uses CREATE TABLE IF NOT EXISTS).
        """
        conn = self.db.get_connection()
        cursor = conn.cursor()

        # Create sales table
        # One row per invoice line; invoice_no is UNIQUE and is used as the
        # upsert key by insert_into_database().
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS sales (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                source_sheet TEXT,
                sr_no TEXT,
                customer_name TEXT,
                village TEXT,
                taluka TEXT,
                district TEXT,
                invoice_no TEXT UNIQUE,
                reference TEXT,
                dispatch_date TEXT,
                product_type TEXT,
                quantity INTEGER,
                rate_per_unit REAL,
                amount REAL,
                final_amount REAL,
                total_liters REAL,
                payment_date TEXT,
                gpay_amount REAL,
                cash_amount REAL,
                cheque_amount REAL,
                rrn_number TEXT,
                sold_by TEXT,
                sale_type TEXT,
                payment_status TEXT,
                payment_method TEXT,
                processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                source_file TEXT
            )
        ''')

        # Create customers table (aggregated from sales)
        # Rebuilt wholesale by update_customers_table() after every import.
        cursor.execute('''
            CREATE TABLE IF NOT EXISTS customers (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                customer_name TEXT,
                village TEXT,
                taluka TEXT,
                district TEXT,
                total_purchases REAL DEFAULT 0,
                total_orders INTEGER DEFAULT 0,
                last_order_date TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        ''')

        conn.commit()
        conn.close()
|
| 72 |
+
|
| 73 |
+
def setup_product_mapping(self):
|
| 74 |
+
"""Standard product mapping for all packaging types"""
|
| 75 |
+
self.PRODUCT_MAPPING = {
|
| 76 |
+
'1 LTR PLASTIC JAR': '1L_PLASTIC_JAR',
|
| 77 |
+
'2 LTR PLASTIC JAR': '2L_PLASTIC_JAR',
|
| 78 |
+
'5 LTR PLASTIC JAR': '5L_PLASTIC_JAR',
|
| 79 |
+
'10 LTR PLASTIC JAR': '10L_PLASTIC_JAR',
|
| 80 |
+
'5 LTR STEEL BARNI': '5L_STEEL_BARNI',
|
| 81 |
+
'10 LTR STEEL BARNI': '10L_STEEL_BARNI',
|
| 82 |
+
'20 LTR STEEL BARNI': '20L_STEEL_BARNI',
|
| 83 |
+
'20 LTR PLASTIC CAN': '20L_PLASTIC_CAN',
|
| 84 |
+
'1 LTR PET BOTTLE': '1L_PET_BOTTLE',
|
| 85 |
+
'20 LTR CARBO': '20L_CARBO'
|
| 86 |
+
}
|
| 87 |
+
|
| 88 |
+
def setup_location_mapping(self):
|
| 89 |
+
"""Gujarati location name standardization"""
|
| 90 |
+
self.GUJARATI_LOCALITIES = {
|
| 91 |
+
'રામપુરા': 'RAMPURA',
|
| 92 |
+
'શેખડી': 'SHEKHADI',
|
| 93 |
+
'સિંહોલ': 'SINHOL',
|
| 94 |
+
'વનાદરા': 'VANADARA',
|
| 95 |
+
'માવલી': 'MAVLI',
|
| 96 |
+
'સિમરડા': 'SIMRADA',
|
| 97 |
+
'બિલપડ': 'BILPAD',
|
| 98 |
+
'વઘોડિયા': 'VAGHODIA',
|
| 99 |
+
'સાકરિયા': 'SAKARIYA'
|
| 100 |
+
}
|
| 101 |
+
|
| 102 |
+
def safe_float(self, value):
|
| 103 |
+
"""Safely convert to float, handle errors"""
|
| 104 |
+
if pd.isna(value) or value in ['', 'NOT_AVAILABLE', None, '_']:
|
| 105 |
+
return 0.0
|
| 106 |
+
try:
|
| 107 |
+
return float(value)
|
| 108 |
+
except (ValueError, TypeError):
|
| 109 |
+
return 0.0
|
| 110 |
+
|
| 111 |
+
def safe_int(self, value):
|
| 112 |
+
"""Safely convert to integer"""
|
| 113 |
+
return int(self.safe_float(value))
|
| 114 |
+
|
| 115 |
+
def parse_date(self, date_str):
|
| 116 |
+
"""Handle all date formats intelligently"""
|
| 117 |
+
if pd.isna(date_str) or date_str in ['', 'NOT_AVAILABLE', None, '_']:
|
| 118 |
+
return 'NOT_AVAILABLE'
|
| 119 |
+
|
| 120 |
+
if isinstance(date_str, (int, float)):
|
| 121 |
+
try:
|
| 122 |
+
return (datetime(1899, 12, 30) + pd.Timedelta(days=date_str)).strftime('%Y-%m-%d')
|
| 123 |
+
except:
|
| 124 |
+
return 'INVALID_DATE'
|
| 125 |
+
|
| 126 |
+
date_str = str(date_str).strip()
|
| 127 |
+
|
| 128 |
+
date_formats = [
|
| 129 |
+
'%Y-%m-%d %H:%M:%S',
|
| 130 |
+
'%d/%m/%Y',
|
| 131 |
+
'%Y-%m-%d',
|
| 132 |
+
'%d-%m-%Y',
|
| 133 |
+
'%d/%m/%Y %H:%M:%S'
|
| 134 |
+
]
|
| 135 |
+
|
| 136 |
+
for fmt in date_formats:
|
| 137 |
+
try:
|
| 138 |
+
return datetime.strptime(date_str, fmt).strftime('%Y-%m-%d')
|
| 139 |
+
except ValueError:
|
| 140 |
+
continue
|
| 141 |
+
|
| 142 |
+
return 'INVALID_DATE'
|
| 143 |
+
|
| 144 |
+
def clean_name(self, name):
|
| 145 |
+
"""Handle names, duplicates, variations"""
|
| 146 |
+
if pd.isna(name) or name in ['', '-', '_', None]:
|
| 147 |
+
return 'NOT_AVAILABLE'
|
| 148 |
+
name = ' '.join(str(name).strip().split())
|
| 149 |
+
return name
|
| 150 |
+
|
| 151 |
+
def standardize_location(self, location):
|
| 152 |
+
"""Handle Gujarati location names"""
|
| 153 |
+
if pd.isna(location) or location in ['', 'NOT_AVAILABLE', None]:
|
| 154 |
+
return 'NOT_AVAILABLE'
|
| 155 |
+
|
| 156 |
+
location_str = str(location).strip()
|
| 157 |
+
|
| 158 |
+
if isinstance(location_str, str):
|
| 159 |
+
for guj_name, eng_name in self.GUJARATI_LOCALITIES.items():
|
| 160 |
+
if guj_name in location_str:
|
| 161 |
+
return eng_name
|
| 162 |
+
|
| 163 |
+
return location_str.upper()
|
| 164 |
+
|
| 165 |
+
    def standardize_product(self, product_name):
        """Convert any raw packaging label to a standard product code.

        Tries the exact PRODUCT_MAPPING keys first (substring match), then
        falls back to keyword-based fuzzy matching on size/material.
        Unrecognized labels are returned as ``UNKNOWN_<LABEL>`` so they stay
        traceable in the database.
        """
        if pd.isna(product_name) or product_name in ['', 'NOT_AVAILABLE', None]:
            return 'UNKNOWN_PRODUCT'

        product_str = str(product_name).strip()
        product_upper = product_str.upper()

        # Exact (substring) match against the canonical mapping keys.
        for key, value in self.PRODUCT_MAPPING.items():
            if key in product_upper:
                return value

        # Fuzzy matching
        # NOTE: branch order matters — '1 LTR' is tested before the larger
        # sizes, so an unusual label such as '21 LTR …' would match here.
        if '1 LTR' in product_upper or '1L' in product_upper:
            if 'PLASTIC' in product_upper or 'JAR' in product_upper:
                return '1L_PLASTIC_JAR'
            elif 'PET' in product_upper or 'BOTTLE' in product_upper:
                return '1L_PET_BOTTLE'
        elif '2 LTR' in product_upper or '2L' in product_upper:
            return '2L_PLASTIC_JAR'
        elif '5 LTR' in product_upper or '5L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '5L_STEEL_BARNI'
            else:
                return '5L_PLASTIC_JAR'
        elif '10 LTR' in product_upper or '10L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '10L_STEEL_BARNI'
            else:
                return '10L_PLASTIC_JAR'
        elif '20 LTR' in product_upper or '20L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '20L_STEEL_BARNI'
            elif 'PLASTIC' in product_upper or 'CAN' in product_upper:
                return '20L_PLASTIC_CAN'
            elif 'CARBO' in product_upper:
                return '20L_CARBO'

        # Fall-through: keep the label visible but flagged as unknown.
        return f"UNKNOWN_{product_upper.replace(' ', '_')}"
|
| 204 |
+
|
| 205 |
+
def detect_sale_type(self, row):
|
| 206 |
+
"""Detect if it's demo sale (single unit) or bulk sale"""
|
| 207 |
+
quantity = self.safe_int(row.get('QTN', 0))
|
| 208 |
+
reference = str(row.get('REF.', '')).upper()
|
| 209 |
+
|
| 210 |
+
if reference == 'DEMO' or quantity == 1:
|
| 211 |
+
return 'DEMO_SALE'
|
| 212 |
+
else:
|
| 213 |
+
return 'BULK_SALE'
|
| 214 |
+
|
| 215 |
+
def calculate_payment_status(self, row):
|
| 216 |
+
"""Determine payment status intelligently"""
|
| 217 |
+
final_amt = self.safe_float(row.get('FINAL AMT', 0))
|
| 218 |
+
gpay = self.safe_float(row.get('G-PAY', 0))
|
| 219 |
+
cash = self.safe_float(row.get('CASH', 0))
|
| 220 |
+
cheque = self.safe_float(row.get('CHQ', 0))
|
| 221 |
+
|
| 222 |
+
paid_amt = gpay + cash + cheque
|
| 223 |
+
|
| 224 |
+
if paid_amt >= final_amt:
|
| 225 |
+
return 'PAID'
|
| 226 |
+
elif paid_amt > 0:
|
| 227 |
+
return 'PARTIAL_PAID'
|
| 228 |
+
elif self.parse_date(row.get('PAYMENT DATE')) not in ['NOT_AVAILABLE', 'INVALID_DATE']:
|
| 229 |
+
return 'PENDING'
|
| 230 |
+
else:
|
| 231 |
+
return 'UNPAID'
|
| 232 |
+
|
| 233 |
+
def detect_payment_method(self, row):
|
| 234 |
+
"""Intelligently detect payment method"""
|
| 235 |
+
gpay = self.safe_float(row.get('G-PAY', 0))
|
| 236 |
+
cash = self.safe_float(row.get('CASH', 0))
|
| 237 |
+
cheque = self.safe_float(row.get('CHQ', 0))
|
| 238 |
+
|
| 239 |
+
if gpay > 0:
|
| 240 |
+
return 'GPAY'
|
| 241 |
+
elif cash > 0:
|
| 242 |
+
return 'CASH'
|
| 243 |
+
elif cheque > 0:
|
| 244 |
+
return 'CHEQUE'
|
| 245 |
+
else:
|
| 246 |
+
return 'NOT_PAID'
|
| 247 |
+
|
| 248 |
+
def process_dataframe(self, df, sheet_name, source_file):
|
| 249 |
+
"""Process entire dataframe and standardize all records"""
|
| 250 |
+
standardized_records = []
|
| 251 |
+
|
| 252 |
+
for idx, row in df.iterrows():
|
| 253 |
+
if (pd.isna(row.get('NAME', '')) and
|
| 254 |
+
pd.isna(row.get('PACKING', '')) and
|
| 255 |
+
pd.isna(row.get('INV NO', ''))):
|
| 256 |
+
continue
|
| 257 |
+
|
| 258 |
+
try:
|
| 259 |
+
standardized_record = self.standardize_record(row, sheet_name, source_file)
|
| 260 |
+
standardized_records.append(standardized_record)
|
| 261 |
+
except Exception as e:
|
| 262 |
+
st.error(f"⚠️ Error processing row {idx}: {e}")
|
| 263 |
+
continue
|
| 264 |
+
|
| 265 |
+
return standardized_records
|
| 266 |
+
|
| 267 |
+
    def standardize_record(self, row, sheet_name, source_file):
        """Map one raw spreadsheet row to a normalized sales-record dict.

        Every field is cleaned through the helper methods (name cleanup,
        location/product standardization, safe numeric coercion, date
        normalization), then missing monetary amounts are auto-derived.
        """
        record = {
            'source_sheet': sheet_name,
            'sr_no': self.clean_name(row.get('SR NO.', 'NOT_AVAILABLE')),
            'customer_name': self.clean_name(row.get('NAME', 'NOT_AVAILABLE')),
            'village': self.standardize_location(row.get('VILLAGE', 'NOT_AVAILABLE')),
            'taluka': self.standardize_location(row.get('TALUKA', 'NOT_AVAILABLE')),
            'district': self.standardize_location(row.get('DISTRICT', 'NOT_AVAILABLE')),
            'invoice_no': self.clean_name(row.get('INV NO', 'NOT_AVAILABLE')),
            'reference': self.clean_name(row.get('REF.', 'NOT_AVAILABLE')),
            'dispatch_date': self.parse_date(row.get('DISPATCH DATE')),
            'product_type': self.standardize_product(row.get('PACKING', 'NOT_AVAILABLE')),
            'quantity': self.safe_int(row.get('QTN', 0)),
            'rate_per_unit': self.safe_float(row.get('RATE', 0)),
            'amount': self.safe_float(row.get('AMT', 0)),
            'final_amount': self.safe_float(row.get('FINAL AMT', 0)),
            'total_liters': self.safe_float(row.get('TOTAL LTR', 0)),
            'payment_date': self.parse_date(row.get('PAYMENT DATE')),
            'gpay_amount': self.safe_float(row.get('G-PAY', 0)),
            'cash_amount': self.safe_float(row.get('CASH', 0)),
            'cheque_amount': self.safe_float(row.get('CHQ', 0)),
            'rrn_number': self.clean_name(row.get('RRN', 'NOT_AVAILABLE')),
            'sold_by': self.clean_name(row.get('BY', 'NOT_AVAILABLE')),
            'sale_type': self.detect_sale_type(row),
            'payment_status': self.calculate_payment_status(row),
            'payment_method': self.detect_payment_method(row),
            'source_file': os.path.basename(source_file)
        }

        # Auto-calculate missing amounts
        # amount = qty * rate when the sheet left AMT blank …
        if record['amount'] == 0 and record['quantity'] > 0 and record['rate_per_unit'] > 0:
            record['amount'] = record['quantity'] * record['rate_per_unit']

        # … and FINAL AMT defaults to the gross amount when absent.
        if record['final_amount'] == 0 and record['amount'] > 0:
            record['final_amount'] = record['amount']

        return record
|
| 305 |
+
|
| 306 |
+
    def insert_into_database(self, records):
        """Upsert processed records into the sales table, keyed by invoice_no.

        Existing invoices are UPDATEd, new ones INSERTed; the aggregated
        customers table is rebuilt afterwards.

        Returns:
            Tuple (inserted_count, updated_count).

        NOTE(review): rows whose invoice_no standardized to 'NOT_AVAILABLE'
        all share the same key, so later ones overwrite earlier ones —
        confirm whether that collapsing is intended.
        """
        conn = self.db.get_connection()
        cursor = conn.cursor()

        inserted_count = 0
        updated_count = 0

        for record in records:
            try:
                # Check if invoice already exists
                cursor.execute('SELECT id FROM sales WHERE invoice_no = ?', (record['invoice_no'],))
                existing = cursor.fetchone()

                if existing:
                    # Update existing record
                    update_query = '''
                        UPDATE sales SET
                            source_sheet=?, sr_no=?, customer_name=?, village=?, taluka=?, district=?,
                            reference=?, dispatch_date=?, product_type=?, quantity=?, rate_per_unit=?,
                            amount=?, final_amount=?, total_liters=?, payment_date=?, gpay_amount=?,
                            cash_amount=?, cheque_amount=?, rrn_number=?, sold_by=?, sale_type=?,
                            payment_status=?, payment_method=?, source_file=?
                        WHERE invoice_no=?
                    '''
                    cursor.execute(update_query, (
                        record['source_sheet'], record['sr_no'], record['customer_name'],
                        record['village'], record['taluka'], record['district'],
                        record['reference'], record['dispatch_date'], record['product_type'],
                        record['quantity'], record['rate_per_unit'], record['amount'],
                        record['final_amount'], record['total_liters'], record['payment_date'],
                        record['gpay_amount'], record['cash_amount'], record['cheque_amount'],
                        record['rrn_number'], record['sold_by'], record['sale_type'],
                        record['payment_status'], record['payment_method'], record['source_file'],
                        record['invoice_no']
                    ))
                    updated_count += 1
                else:
                    # Insert new record
                    insert_query = '''
                        INSERT INTO sales (
                            source_sheet, sr_no, customer_name, village, taluka, district,
                            invoice_no, reference, dispatch_date, product_type, quantity,
                            rate_per_unit, amount, final_amount, total_liters, payment_date,
                            gpay_amount, cash_amount, cheque_amount, rrn_number, sold_by,
                            sale_type, payment_status, payment_method, source_file
                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    '''
                    cursor.execute(insert_query, (
                        record['source_sheet'], record['sr_no'], record['customer_name'],
                        record['village'], record['taluka'], record['district'],
                        record['invoice_no'], record['reference'], record['dispatch_date'],
                        record['product_type'], record['quantity'], record['rate_per_unit'],
                        record['amount'], record['final_amount'], record['total_liters'],
                        record['payment_date'], record['gpay_amount'], record['cash_amount'],
                        record['cheque_amount'], record['rrn_number'], record['sold_by'],
                        record['sale_type'], record['payment_status'], record['payment_method'],
                        record['source_file']
                    ))
                    inserted_count += 1

            except Exception as e:
                # One bad record is reported and skipped; the batch continues.
                st.error(f"❌ Database error for invoice {record['invoice_no']}: {e}")
                continue

        conn.commit()

        # Update customers table
        self.update_customers_table(conn)

        conn.close()

        return inserted_count, updated_count
|
| 379 |
+
|
| 380 |
+
    def update_customers_table(self, conn):
        """Rebuild the customers table by aggregating the sales table.

        The table is fully cleared and repopulated, so customer ``id``
        values are NOT stable across imports — do not store them elsewhere.
        """
        cursor = conn.cursor()

        # Clear and rebuild customers table
        cursor.execute('DELETE FROM customers')

        # Insert aggregated customer data
        # One row per (name, village, taluka, district) combination.
        cursor.execute('''
            INSERT INTO customers (customer_name, village, taluka, district, total_purchases, total_orders, last_order_date)
            SELECT
                customer_name,
                village,
                taluka,
                district,
                SUM(final_amount) as total_purchases,
                COUNT(*) as total_orders,
                MAX(dispatch_date) as last_order_date
            FROM sales
            WHERE customer_name != 'NOT_AVAILABLE'
            GROUP BY customer_name, village, taluka, district
        ''')

        conn.commit()
|
| 404 |
+
|
| 405 |
+
    def process_excel_file(self, file_path):
        """Main method to process Excel file - called from Streamlit.

        Reads every sheet of the workbook, standardizes all rows and
        upserts them into the sales table, reporting progress in the UI.

        Returns:
            True when at least one record was inserted or updated,
            False otherwise (including on error).
        """
        try:
            st.info(f"🔄 Processing: {os.path.basename(file_path)}")

            # Read the Excel file
            xl = pd.ExcelFile(file_path)

            # Process each sheet
            all_records = []

            for sheet_name in xl.sheet_names:
                with st.spinner(f"Processing sheet: {sheet_name}..."):
                    # Read sheet
                    df = pd.read_excel(file_path, sheet_name=sheet_name)

                    # Standardize data
                    standardized_records = self.process_dataframe(df, sheet_name, file_path)
                    all_records.extend(standardized_records)

            if not all_records:
                st.warning("⚠️ No valid records found in the file")
                return False

            # Insert into database
            with st.spinner("Inserting into database..."):
                inserted, updated = self.insert_into_database(all_records)

            # Show results
            if inserted > 0 or updated > 0:
                st.success(f"✅ Processed {len(all_records)} records from {os.path.basename(file_path)}")
                st.success(f"📊 New: {inserted}, Updated: {updated}")

                # Show quick summary
                self.show_import_summary(all_records)
                return True
            else:
                st.warning("⚠️ No records were inserted or updated")
                return False

        except Exception as e:
            # Catch-all so a malformed workbook never crashes the app.
            st.error(f"❌ Error processing file: {e}")
            return False
|
| 448 |
+
|
| 449 |
+
def show_import_summary(self, records):
    """Render a quick Streamlit summary of the records just imported."""
    if not records:
        return

    summary = pd.DataFrame(records)

    # Headline metrics in a four-column row
    demo_count = len(summary[summary['sale_type'] == 'DEMO_SALE'])
    bulk_count = len(summary[summary['sale_type'] == 'BULK_SALE'])
    grand_total = summary['final_amount'].sum()

    col_a, col_b, col_c, col_d = st.columns(4)
    with col_a:
        st.metric("Total Records", len(records))
    with col_b:
        st.metric("Demo Sales", demo_count)
    with col_c:
        st.metric("Bulk Sales", bulk_count)
    with col_d:
        st.metric("Total Amount", f"₹{grand_total:,.2f}")

    # Top five product types by record count
    st.subheader("📦 Products Imported")
    for product, count in summary['product_type'].value_counts().head(5).items():
        st.write(f"- {product}: {count} records")
|
| 475 |
+
|
| 476 |
+
def get_import_stats(self):
    """Return import totals and the five most recent imports for the dashboard.

    Returns:
        dict with ``total_records`` (int), ``files_processed`` (int) and
        ``recent_imports`` (list of per-file record dicts).
    """
    conn = self.db.get_connection()

    try:
        # Overall row count in the sales table
        totals = pd.read_sql('SELECT COUNT(*) as count FROM sales', conn)
        total_records = totals['count'].iloc[0]

        # Number of distinct source files ever imported
        distinct_files = pd.read_sql(
            'SELECT COUNT(DISTINCT source_file) as count FROM sales', conn)
        files_processed = distinct_files['count'].iloc[0]

        # Five most recently processed files with their record counts
        recent = pd.read_sql('''
            SELECT source_file, COUNT(*) as records, MAX(processed_at) as last_import
            FROM sales
            GROUP BY source_file
            ORDER BY last_import DESC
            LIMIT 5
        ''', conn)

        return {
            'total_records': total_records,
            'files_processed': files_processed,
            'recent_imports': recent.to_dict('records'),
        }
    finally:
        # Always release the connection, even if a query fails.
        conn.close()
|
sales_management.db
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:6ae6dc8f1fb2158a1a11b75081b08290d985cba6ea96bf8943761f3cb0e4dc0d
|
| 3 |
+
size 729088
|
sales_manager.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# enhanced_sales_manager.py
|
| 2 |
+
from datetime import datetime, timedelta
|
| 3 |
+
import pandas as pd
|
| 4 |
+
import sqlite3
|
| 5 |
+
|
| 6 |
+
class EnhancedSalesManager:
    """High-level reporting / business-intelligence helpers.

    Combines the database manager (for SQL access) with the data processor
    (for bulk file imports).
    """

    def __init__(self, db_manager, data_processor):
        self.db_manager = db_manager
        self.data_processor = data_processor

    def batch_import_data(self, directory_path):
        """Import all data files from *directory_path* via the data processor."""
        return self.data_processor.process_directory(directory_path)

    def get_customer_insights(self):
        """Return top customers by spending and per-village sales performance.

        Returns:
            dict with DataFrames ``top_customers`` and ``village_performance``.
        """
        conn = self.db_manager.get_connection()

        # Top 20 customers by total spending
        top_customers = pd.read_sql('''
            SELECT c.name, c.village, SUM(s.total_amount) as total_spent, COUNT(s.sale_id) as purchases
            FROM customers c
            JOIN sales s ON c.customer_id = s.customer_id
            GROUP BY c.customer_id
            ORDER BY total_spent DESC
            LIMIT 20
        ''', conn)

        # Village performance.
        # BUGFIX: qualify customer_id / village with a table alias — when both
        # joined tables carry the column, the unqualified reference is an
        # "ambiguous column name" error in SQLite.
        village_performance = pd.read_sql('''
            SELECT c.village, COUNT(DISTINCT s.customer_id) as customers,
                   SUM(s.total_amount) as total_sales, AVG(s.total_amount) as avg_sale
            FROM sales s
            JOIN customers c ON s.customer_id = c.customer_id
            GROUP BY c.village
            ORDER BY total_sales DESC
        ''', conn)

        return {
            'top_customers': top_customers,
            'village_performance': village_performance
        }

    def generate_comprehensive_report(self, start_date=None, end_date=None):
        """Generate a detailed business-intelligence report.

        Args:
            start_date: Inclusive 'YYYY-MM-DD' lower bound (default: 30 days ago).
            end_date: Inclusive 'YYYY-MM-DD' upper bound (default: today).

        Returns:
            dict of DataFrames: ``sales_trends``, ``product_performance``
            and ``payment_analysis``.
        """
        if not start_date:
            start_date = (datetime.now() - timedelta(days=30)).strftime('%Y-%m-%d')
        if not end_date:
            end_date = datetime.now().strftime('%Y-%m-%d')

        conn = self.db_manager.get_connection()

        # SECURITY FIX: bind the date window as query parameters instead of
        # f-string interpolation (avoids SQL injection / quoting bugs).
        window = (start_date, end_date)
        reports = {}

        # Daily sales trends within the window
        reports['sales_trends'] = pd.read_sql('''
            SELECT DATE(sale_date) as date, SUM(total_amount) as daily_sales,
                   SUM(total_liters) as daily_liters, COUNT(*) as transactions
            FROM sales
            WHERE sale_date BETWEEN ? AND ?
            GROUP BY DATE(sale_date)
            ORDER BY date
        ''', conn, params=window)

        # Revenue and quantity per product
        reports['product_performance'] = pd.read_sql('''
            SELECT p.packing_type, p.capacity_ltr, SUM(si.quantity) as total_quantity,
                   SUM(si.amount) as total_revenue, COUNT(DISTINCT s.sale_id) as transactions
            FROM sale_items si
            JOIN products p ON si.product_id = p.product_id
            JOIN sales s ON si.sale_id = s.sale_id
            WHERE s.sale_date BETWEEN ? AND ?
            GROUP BY p.product_id
            ORDER BY total_revenue DESC
        ''', conn, params=window)

        # Paid vs pending split (payment_date NULL means unpaid)
        reports['payment_analysis'] = pd.read_sql('''
            SELECT
                CASE
                    WHEN payment_date IS NOT NULL THEN 'Paid'
                    ELSE 'Pending'
                END as payment_status,
                COUNT(*) as transactions,
                SUM(total_amount) as amount,
                AVG(total_amount) as avg_amount
            FROM sales
            WHERE sale_date BETWEEN ? AND ?
            GROUP BY payment_status
        ''', conn, params=window)

        return reports
|
whatsapp_manager.py
ADDED
|
@@ -0,0 +1,402 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# whatsapp_manager.py
|
| 2 |
+
import pywhatkit
|
| 3 |
+
import logging
|
| 4 |
+
from datetime import datetime, timedelta
|
| 5 |
+
import time
|
| 6 |
+
import pandas as pd
|
| 7 |
+
import os
|
| 8 |
+
import streamlit as st
|
| 9 |
+
|
| 10 |
+
class WhatsAppManager:
    """Send and log WhatsApp messages for customers and distributors.

    Uses pywhatkit for delivery and Streamlit for user feedback; every
    attempt is recorded in the ``whatsapp_logs`` table.

    NOTE(review): pywhatkit schedules the send via WhatsApp Web — "sent"
    below means the pywhatkit call returned without raising, not a
    confirmed delivery; confirm against actual delivery semantics.
    """

    def __init__(self, db_manager):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def send_message(self, phone_number, message, image_path=None):
        """Send a WhatsApp message (optionally with an image) with error handling.

        Args:
            phone_number: Raw phone number; normalized via _clean_phone_number.
            message: Message text to send.
            image_path: Optional path to an image to attach.

        Returns:
            True when pywhatkit accepted the message, False otherwise.
        """
        raw_number = phone_number
        try:
            # Normalize to +91XXXXXXXXXX; None means the number is unusable.
            phone_number = self._clean_phone_number(phone_number)
            if not phone_number:
                # BUGFIX: log the original input, not the cleaned value —
                # the old code passed None here, which made the logger's
                # phone[-10:] slice raise and silently drop the log entry.
                self._log_message(raw_number, message, "failed", "Invalid phone number")
                return False

            st.info(f"📱 Preparing to send WhatsApp message to {phone_number}")

            # Schedule message (sends in 2 minutes)
            send_time = datetime.now() + timedelta(minutes=2)

            try:
                if image_path and os.path.exists(image_path):
                    pywhatkit.sendwhats_image(
                        phone_number,
                        image_path,
                        message,
                        wait_time=20,
                        tab_close=True
                    )
                else:
                    pywhatkit.sendwhatmsg(
                        phone_number,
                        message,
                        send_time.hour,
                        send_time.minute,
                        wait_time=20,
                        tab_close=True
                    )

                # Log the message ("sent" = accepted by pywhatkit, see class note)
                self._log_message(phone_number, message, "sent")
                st.success(f"✅ Message sent successfully to {phone_number}")
                return True

            except Exception as e:
                error_msg = f"PyWhatKit error: {str(e)}"
                st.error(f"❌ {error_msg}")
                self._log_message(phone_number, message, "failed", error_msg)
                return False

        except Exception as e:
            error_msg = f"Unexpected error: {str(e)}"
            st.error(f"❌ {error_msg}")
            self._log_message(phone_number, message, "failed", error_msg)
            return False

    def send_bulk_messages(self, customer_ids, message_template):
        """Send a personalized message to each customer id.

        Args:
            customer_ids: Iterable of customer ids to message.
            message_template: Template with {name}/{village}/... placeholders.

        Returns:
            List of per-customer result dicts (customer_id, customer_name,
            phone, status, message).
        """
        results = []
        total_customers = len(customer_ids)

        progress_bar = st.progress(0)
        status_text = st.empty()

        for i, customer_id in enumerate(customer_ids):
            try:
                # Update progress
                progress = (i + 1) / total_customers
                progress_bar.progress(progress)
                status_text.text(f"Processing {i+1}/{total_customers} customers...")

                # int() coercion prevents SQL injection through the id value.
                customer = self.db.get_dataframe('customers',
                    f"SELECT * FROM customers WHERE customer_id = {int(customer_id)}")

                if not customer.empty:
                    customer_data = customer.iloc[0]
                    phone = customer_data['mobile']

                    if phone and pd.notna(phone) and str(phone).strip():
                        personalized_msg = self._personalize_message(message_template, customer_data)
                        success = self.send_message(phone, personalized_msg)

                        results.append({
                            'customer_id': customer_id,
                            'customer_name': customer_data['name'],
                            'phone': phone,
                            'status': 'sent' if success else 'failed',
                            'message': personalized_msg[:50] + "..." if len(personalized_msg) > 50 else personalized_msg
                        })
                    else:
                        results.append({
                            'customer_id': customer_id,
                            'customer_name': customer_data['name'],
                            'phone': phone,
                            'status': 'failed',
                            'message': 'No phone number available'
                        })
                else:
                    results.append({
                        'customer_id': customer_id,
                        'customer_name': 'Unknown',
                        'phone': 'N/A',
                        'status': 'failed',
                        'message': 'Customer not found'
                    })

                # Small delay to avoid rate limiting
                time.sleep(2)

            except Exception as e:
                results.append({
                    'customer_id': customer_id,
                    'customer_name': 'Error',
                    'phone': 'N/A',
                    'status': 'failed',
                    'message': f'Error: {str(e)}'
                })

        progress_bar.empty()
        status_text.empty()

        return results

    def send_distributor_message(self, distributor_id, message):
        """Send a personalized message to a distributor's mantri mobile."""
        try:
            # int() coercion prevents SQL injection through the id value.
            distributor = self.db.get_dataframe('distributors',
                f"SELECT * FROM distributors WHERE distributor_id = {int(distributor_id)}")

            if not distributor.empty:
                distributor_data = distributor.iloc[0]
                phone = distributor_data['mantri_mobile']

                if phone and pd.notna(phone) and str(phone).strip():
                    personalized_msg = self._personalize_distributor_message(message, distributor_data)
                    return self.send_message(phone, personalized_msg)
                else:
                    st.warning(f"No mobile number found for distributor: {distributor_data['name']}")
                    return False
            else:
                st.error("Distributor not found")
                return False

        except Exception as e:
            st.error(f"Error sending distributor message: {e}")
            return False

    def _personalize_distributor_message(self, template, distributor_data):
        """Fill distributor placeholders ({name}, {village}, ...) in *template*."""
        message = template
        message = message.replace('{name}', distributor_data.get('mantri_name', 'Distributor'))
        message = message.replace('{distributor_name}', distributor_data.get('name', ''))
        message = message.replace('{village}', distributor_data.get('village', ''))
        message = message.replace('{taluka}', distributor_data.get('taluka', ''))
        message = message.replace('{sabhasad_count}', str(distributor_data.get('sabhasad_count', 0)))
        return message

    def _clean_phone_number(self, phone):
        """Normalize *phone* to '+91XXXXXXXXXX'; return None when unusable.

        Accepts 10-digit local numbers, 0-prefixed 11-digit numbers and
        numbers already carrying the 91 country code.
        """
        if not phone or pd.isna(phone):
            return None

        # Convert to string and keep digits only (drops spaces, hyphens, '+')
        phone_str = str(phone).strip()
        clean_phone = ''.join(filter(str.isdigit, phone_str))

        # Too short to be a valid Indian mobile number
        if len(clean_phone) < 10:
            return None

        # Add country code if missing (assuming India)
        if len(clean_phone) == 10:
            clean_phone = '91' + clean_phone
        elif len(clean_phone) == 11 and clean_phone.startswith('0'):
            clean_phone = '91' + clean_phone[1:]
        elif len(clean_phone) == 12 and clean_phone.startswith('91'):
            # Already correct format
            pass
        else:
            # If longer than 12 digits, take last 12
            if len(clean_phone) > 12:
                clean_phone = clean_phone[-12:]

        return '+' + clean_phone

    def _personalize_message(self, template, customer_data):
        """Fill customer placeholders ({name}, {village}, {date}, ...) in *template*."""
        message = template
        message = message.replace('{name}', customer_data.get('name', 'Customer'))
        message = message.replace('{village}', customer_data.get('village', ''))
        message = message.replace('{taluka}', customer_data.get('taluka', ''))
        message = message.replace('{district}', customer_data.get('district', ''))

        # Add current date
        current_date = datetime.now().strftime('%d-%m-%Y')
        message = message.replace('{date}', current_date)

        return message

    def _log_message(self, phone, message, status, error=None):
        """Record the message attempt in the whatsapp_logs table (best effort).

        Any logging failure is swallowed and written to the module logger so
        a broken log never aborts an actual send.
        """
        try:
            # Ensure whatsapp_logs table exists
            self.db.execute_query('''
                CREATE TABLE IF NOT EXISTS whatsapp_logs (
                    log_id INTEGER PRIMARY KEY AUTOINCREMENT,
                    customer_id INTEGER,
                    distributor_id INTEGER,
                    phone_number TEXT,
                    message_content TEXT,
                    message_type TEXT,
                    status TEXT,
                    error_message TEXT,
                    sent_date TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    FOREIGN KEY (customer_id) REFERENCES customers (customer_id) ON DELETE SET NULL,
                    FOREIGN KEY (distributor_id) REFERENCES distributors (distributor_id) ON DELETE SET NULL
                )
            ''', log_action=False)

            # BUGFIX: tolerate None/short phone values — the old code sliced
            # phone[-10:] unconditionally, which raised on None and dropped
            # the log entry for invalid numbers.
            phone_tail = str(phone)[-10:] if phone else ''

            customer_id = None
            distributor_id = None
            if phone_tail:
                # Find customer by phone (last 10 digits)
                customer_result = self.db.execute_query(
                    'SELECT customer_id FROM customers WHERE mobile LIKE ?',
                    (f'%{phone_tail}%',),
                    log_action=False
                )
                customer_id = customer_result[0][0] if customer_result else None

                # Find distributor by phone
                distributor_result = self.db.execute_query(
                    'SELECT distributor_id FROM distributors WHERE mantri_mobile LIKE ?',
                    (f'%{phone_tail}%',),
                    log_action=False
                )
                distributor_id = distributor_result[0][0] if distributor_result else None

            # Determine message type
            message_type = 'customer' if customer_id else 'distributor' if distributor_id else 'general'

            self.db.execute_query('''
                INSERT INTO whatsapp_logs (customer_id, distributor_id, phone_number, message_content, message_type, status, error_message)
                VALUES (?, ?, ?, ?, ?, ?, ?)
            ''', (customer_id, distributor_id, phone, message, message_type, status, error), log_action=False)

        except Exception as e:
            self.logger.error(f"Failed to log message: {e}")

    def get_message_stats(self):
        """Return totals of sent/failed messages and the last send timestamp."""
        try:
            stats = self.db.execute_query('''
                SELECT
                    COUNT(*) as total_messages,
                    SUM(CASE WHEN status = 'sent' THEN 1 ELSE 0 END) as sent_messages,
                    SUM(CASE WHEN status = 'failed' THEN 1 ELSE 0 END) as failed_messages,
                    MAX(sent_date) as last_message_date
                FROM whatsapp_logs
            ''', log_action=False)

            if stats:
                return {
                    'total_messages': stats[0][0] or 0,
                    'sent_messages': stats[0][1] or 0,
                    'failed_messages': stats[0][2] or 0,
                    'last_message_date': stats[0][3]
                }
            return {}

        except Exception as e:
            self.logger.error(f"Error getting message stats: {e}")
            return {}

    def get_recent_messages(self, limit=10):
        """Return the *limit* most recent WhatsApp log rows as a DataFrame."""
        try:
            # int() coercion prevents SQL injection through the limit value.
            messages = self.db.get_dataframe('whatsapp_logs', f'''
                SELECT wl.*,
                       c.name as customer_name,
                       d.name as distributor_name
                FROM whatsapp_logs wl
                LEFT JOIN customers c ON wl.customer_id = c.customer_id
                LEFT JOIN distributors d ON wl.distributor_id = d.distributor_id
                ORDER BY wl.sent_date DESC
                LIMIT {int(limit)}
            ''')
            return messages
        except Exception as e:
            self.logger.error(f"Error getting recent messages: {e}")
            return pd.DataFrame()

    def send_welcome_message(self, phone_number, name, role="customer"):
        """Send a canned welcome message to a new customer or distributor."""
        if role == "distributor":
            message = f"""Welcome {name}! 🎉

Thank you for joining our distributor network!

We're excited to have you on board and look forward to working together to grow your business.

Our team will contact you shortly to discuss:
• Training schedule
• Product information
• Sales strategies
• Support systems

For any immediate queries, feel free to contact us.

Best regards,
Sales Team"""
        else:
            message = f"""Welcome {name}! 🎉

Thank you for choosing us!

We're delighted to have you as our customer and look forward to serving you with the best products and service.

Feel free to reach out for any queries or support.

Best regards,
Sales Team"""

        return self.send_message(phone_number, message)

    def send_payment_reminder(self, customer_id, invoice_no, pending_amount):
        """Send a payment reminder for *invoice_no* to the given customer."""
        try:
            # int() coercion prevents SQL injection through the id value.
            customer = self.db.get_dataframe('customers',
                f"SELECT * FROM customers WHERE customer_id = {int(customer_id)}")

            if not customer.empty:
                customer_data = customer.iloc[0]
                phone = customer_data['mobile']

                if phone and pd.notna(phone) and str(phone).strip():
                    message = f"""Hello {customer_data['name']},

This is a friendly reminder regarding your pending payment.

Invoice: {invoice_no}
Pending Amount: ₹{pending_amount:,.2f}

Please make the payment at your earliest convenience.

Thank you for your cooperation!

Best regards,
Sales Team"""

                    return self.send_message(phone, message)
                else:
                    st.warning(f"No mobile number found for customer: {customer_data['name']}")
                    return False
            else:
                st.error("Customer not found")
                return False

        except Exception as e:
            st.error(f"Error sending payment reminder: {e}")
            return False
|
| 369 |
+
|
| 370 |
+
# Utility function to check WhatsApp availability
|
| 371 |
+
def check_whatsapp_availability():
    """Report whether the optional ``pywhatkit`` dependency can be imported."""
    try:
        import pywhatkit  # noqa: F401 — the import itself is the availability probe
    except ImportError:
        return False
    return True
|
| 378 |
+
|
| 379 |
+
# Example usage and test function
|
| 380 |
+
def test_whatsapp_manager(db):
    """Interactive Streamlit smoke test for WhatsAppManager helpers."""
    st.subheader("🧪 WhatsApp Manager Test")

    # Guard clause: only run when the user clicks the button.
    if not st.button("Test WhatsApp Connection"):
        return

    try:
        manager = WhatsAppManager(db)

        # Phone-number normalization round-trip
        for raw in ("9876543210", "09876543210", "919876543210"):
            st.write(f"Original: {raw} → Cleaned: {manager._clean_phone_number(raw)}")

        # Template personalization
        sample_customer = {'name': 'John Doe', 'village': 'Test Village'}
        rendered = manager._personalize_message("Hello {name} from {village}!", sample_customer)
        st.write(f"Personalized message: {rendered}")

        st.success("✅ WhatsApp manager test completed successfully!")

    except Exception as e:
        st.error(f"❌ WhatsApp manager test failed: {e}")
|