raj thakur committed on
Commit
48fe444
·
1 Parent(s): 5fc2939

Saving local changes before rebase

Browse files
__pycache__/data_processor.cpython-313.pyc CHANGED
Binary files a/__pycache__/data_processor.cpython-313.pyc and b/__pycache__/data_processor.cpython-313.pyc differ
 
__pycache__/database.cpython-313.pyc CHANGED
Binary files a/__pycache__/database.cpython-313.pyc and b/__pycache__/database.cpython-313.pyc differ
 
__pycache__/sales_data_processor.cpython-310.pyc ADDED
Binary file (15.4 kB). View file
 
__pycache__/sales_data_processor.cpython-313.pyc ADDED
Binary file (24.3 kB). View file
 
__pycache__/whatsapp_manager.cpython-313.pyc ADDED
Binary file (17.6 kB). View file
 
automation.py CHANGED
@@ -5,6 +5,8 @@ import smtplib
5
  from email.mime.text import MimeText
6
  from email.mime.multipart import MimeMultipart
7
  from datetime import datetime, timedelta
 
 
8
 
9
  class AutomationManager:
10
  def __init__(self, db_manager, whatsapp_manager):
 
5
  from email.mime.text import MimeText
6
  from email.mime.multipart import MimeMultipart
7
  from datetime import datetime, timedelta
8
+ from analytics import Analytics
9
+ import os
10
 
11
  class AutomationManager:
12
  def __init__(self, db_manager, whatsapp_manager):
components/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (147 Bytes). View file
 
components/__pycache__/database_status.cpython-313.pyc ADDED
Binary file (1.96 kB). View file
 
pages/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (142 Bytes). View file
 
pages/__pycache__/customers.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/customers.cpython-310.pyc and b/pages/__pycache__/customers.cpython-310.pyc differ
 
pages/__pycache__/dashboard.cpython-313.pyc ADDED
Binary file (7.01 kB). View file
 
pages/__pycache__/data_import.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/data_import.cpython-310.pyc and b/pages/__pycache__/data_import.cpython-310.pyc differ
 
pages/__pycache__/demos.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/demos.cpython-310.pyc and b/pages/__pycache__/demos.cpython-310.pyc differ
 
pages/__pycache__/distributors.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/distributors.cpython-310.pyc and b/pages/__pycache__/distributors.cpython-310.pyc differ
 
pages/__pycache__/file_viewer.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/file_viewer.cpython-310.pyc and b/pages/__pycache__/file_viewer.cpython-310.pyc differ
 
pages/__pycache__/payments.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/payments.cpython-310.pyc and b/pages/__pycache__/payments.cpython-310.pyc differ
 
pages/__pycache__/reports.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/reports.cpython-310.pyc and b/pages/__pycache__/reports.cpython-310.pyc differ
 
pages/__pycache__/sales.cpython-310.pyc CHANGED
Binary files a/pages/__pycache__/sales.cpython-310.pyc and b/pages/__pycache__/sales.cpython-310.pyc differ
 
sales_data_processor.py ADDED
@@ -0,0 +1,502 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # sales_data_processor.py
2
+ import pandas as pd
3
+ import re
4
+ from datetime import datetime
5
+ import numpy as np
6
+ import sqlite3
7
+ import os
8
+ import streamlit as st
9
+
10
class SalesDataProcessor:
    """Process raw sales spreadsheets into a normalized SQLite schema.

    Cleans customer/location/product fields, derives sale type and
    payment status per row, and upserts the results into the ``sales``
    table (keyed by invoice number), then rebuilds the aggregated
    ``customers`` table.
    """

    def __init__(self, db):
        """Store the database manager and prepare mappings and tables.

        Args:
            db: database manager exposing ``get_connection()`` that
                returns an open ``sqlite3`` connection.
        """
        self.db = db
        self.setup_product_mapping()
        self.setup_location_mapping()
        # Creates the sales/customers tables via self.db's connection.
        self.setup_database_tables()
16
+
17
+ def setup_database_tables(self):
18
+ """Initialize database tables if they don't exist"""
19
+ conn = self.db.get_connection()
20
+ cursor = conn.cursor()
21
+
22
+ # Create sales table
23
+ cursor.execute('''
24
+ CREATE TABLE IF NOT EXISTS sales (
25
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
26
+ source_sheet TEXT,
27
+ sr_no TEXT,
28
+ customer_name TEXT,
29
+ village TEXT,
30
+ taluka TEXT,
31
+ district TEXT,
32
+ invoice_no TEXT UNIQUE,
33
+ reference TEXT,
34
+ dispatch_date TEXT,
35
+ product_type TEXT,
36
+ quantity INTEGER,
37
+ rate_per_unit REAL,
38
+ amount REAL,
39
+ final_amount REAL,
40
+ total_liters REAL,
41
+ payment_date TEXT,
42
+ gpay_amount REAL,
43
+ cash_amount REAL,
44
+ cheque_amount REAL,
45
+ rrn_number TEXT,
46
+ sold_by TEXT,
47
+ sale_type TEXT,
48
+ payment_status TEXT,
49
+ payment_method TEXT,
50
+ processed_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
51
+ source_file TEXT
52
+ )
53
+ ''')
54
+
55
+ # Create customers table (aggregated from sales)
56
+ cursor.execute('''
57
+ CREATE TABLE IF NOT EXISTS customers (
58
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
59
+ customer_name TEXT,
60
+ village TEXT,
61
+ taluka TEXT,
62
+ district TEXT,
63
+ total_purchases REAL DEFAULT 0,
64
+ total_orders INTEGER DEFAULT 0,
65
+ last_order_date TEXT,
66
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
67
+ )
68
+ ''')
69
+
70
+ conn.commit()
71
+ conn.close()
72
+
73
+ def setup_product_mapping(self):
74
+ """Standard product mapping for all packaging types"""
75
+ self.PRODUCT_MAPPING = {
76
+ '1 LTR PLASTIC JAR': '1L_PLASTIC_JAR',
77
+ '2 LTR PLASTIC JAR': '2L_PLASTIC_JAR',
78
+ '5 LTR PLASTIC JAR': '5L_PLASTIC_JAR',
79
+ '10 LTR PLASTIC JAR': '10L_PLASTIC_JAR',
80
+ '5 LTR STEEL BARNI': '5L_STEEL_BARNI',
81
+ '10 LTR STEEL BARNI': '10L_STEEL_BARNI',
82
+ '20 LTR STEEL BARNI': '20L_STEEL_BARNI',
83
+ '20 LTR PLASTIC CAN': '20L_PLASTIC_CAN',
84
+ '1 LTR PET BOTTLE': '1L_PET_BOTTLE',
85
+ '20 LTR CARBO': '20L_CARBO'
86
+ }
87
+
88
+ def setup_location_mapping(self):
89
+ """Gujarati location name standardization"""
90
+ self.GUJARATI_LOCALITIES = {
91
+ 'રામપુરા': 'RAMPURA',
92
+ 'શેખડી': 'SHEKHADI',
93
+ 'સિંહોલ': 'SINHOL',
94
+ 'વનાદરા': 'VANADARA',
95
+ 'માવલી': 'MAVLI',
96
+ 'સિમરડા': 'SIMRADA',
97
+ 'બિલપડ': 'BILPAD',
98
+ 'વઘોડિયા': 'VAGHODIA',
99
+ 'સાકરિયા': 'SAKARIYA'
100
+ }
101
+
102
+ def safe_float(self, value):
103
+ """Safely convert to float, handle errors"""
104
+ if pd.isna(value) or value in ['', 'NOT_AVAILABLE', None, '_']:
105
+ return 0.0
106
+ try:
107
+ return float(value)
108
+ except (ValueError, TypeError):
109
+ return 0.0
110
+
111
+ def safe_int(self, value):
112
+ """Safely convert to integer"""
113
+ return int(self.safe_float(value))
114
+
115
+ def parse_date(self, date_str):
116
+ """Handle all date formats intelligently"""
117
+ if pd.isna(date_str) or date_str in ['', 'NOT_AVAILABLE', None, '_']:
118
+ return 'NOT_AVAILABLE'
119
+
120
+ if isinstance(date_str, (int, float)):
121
+ try:
122
+ return (datetime(1899, 12, 30) + pd.Timedelta(days=date_str)).strftime('%Y-%m-%d')
123
+ except:
124
+ return 'INVALID_DATE'
125
+
126
+ date_str = str(date_str).strip()
127
+
128
+ date_formats = [
129
+ '%Y-%m-%d %H:%M:%S',
130
+ '%d/%m/%Y',
131
+ '%Y-%m-%d',
132
+ '%d-%m-%Y',
133
+ '%d/%m/%Y %H:%M:%S'
134
+ ]
135
+
136
+ for fmt in date_formats:
137
+ try:
138
+ return datetime.strptime(date_str, fmt).strftime('%Y-%m-%d')
139
+ except ValueError:
140
+ continue
141
+
142
+ return 'INVALID_DATE'
143
+
144
+ def clean_name(self, name):
145
+ """Handle names, duplicates, variations"""
146
+ if pd.isna(name) or name in ['', '-', '_', None]:
147
+ return 'NOT_AVAILABLE'
148
+ name = ' '.join(str(name).strip().split())
149
+ return name
150
+
151
+ def standardize_location(self, location):
152
+ """Handle Gujarati location names"""
153
+ if pd.isna(location) or location in ['', 'NOT_AVAILABLE', None]:
154
+ return 'NOT_AVAILABLE'
155
+
156
+ location_str = str(location).strip()
157
+
158
+ if isinstance(location_str, str):
159
+ for guj_name, eng_name in self.GUJARATI_LOCALITIES.items():
160
+ if guj_name in location_str:
161
+ return eng_name
162
+
163
+ return location_str.upper()
164
+
165
    def standardize_product(self, product_name):
        """Convert any product name to a standard product code.

        First tries exact substring matches against PRODUCT_MAPPING,
        then falls back to fuzzy capacity + material matching.  Branch
        order matters here: the mapping pass must run before the fuzzy
        '1 LTR' test so that e.g. '10 LTR PLASTIC JAR' is caught by its
        exact key.

        Returns:
            A canonical code such as '5L_STEEL_BARNI', or
            'UNKNOWN_PRODUCT' / 'UNKNOWN_<NAME>' when nothing matches.
        """
        if pd.isna(product_name) or product_name in ['', 'NOT_AVAILABLE', None]:
            return 'UNKNOWN_PRODUCT'

        product_str = str(product_name).strip()
        product_upper = product_str.upper()

        # Exact (substring) pass against the canonical mapping.
        for key, value in self.PRODUCT_MAPPING.items():
            if key in product_upper:
                return value

        # Fuzzy matching
        if '1 LTR' in product_upper or '1L' in product_upper:
            if 'PLASTIC' in product_upper or 'JAR' in product_upper:
                return '1L_PLASTIC_JAR'
            elif 'PET' in product_upper or 'BOTTLE' in product_upper:
                return '1L_PET_BOTTLE'
            # No material keyword: falls through to UNKNOWN_ below.
        elif '2 LTR' in product_upper or '2L' in product_upper:
            return '2L_PLASTIC_JAR'
        elif '5 LTR' in product_upper or '5L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '5L_STEEL_BARNI'
            else:
                # Plastic jar is the default 5L packaging.
                return '5L_PLASTIC_JAR'
        elif '10 LTR' in product_upper or '10L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '10L_STEEL_BARNI'
            else:
                return '10L_PLASTIC_JAR'
        elif '20 LTR' in product_upper or '20L' in product_upper:
            if 'STEEL' in product_upper or 'BARNI' in product_upper:
                return '20L_STEEL_BARNI'
            elif 'PLASTIC' in product_upper or 'CAN' in product_upper:
                return '20L_PLASTIC_CAN'
            elif 'CARBO' in product_upper:
                return '20L_CARBO'

        # Nothing matched: keep the raw name visible for manual review.
        return f"UNKNOWN_{product_upper.replace(' ', '_')}"
204
+
205
+ def detect_sale_type(self, row):
206
+ """Detect if it's demo sale (single unit) or bulk sale"""
207
+ quantity = self.safe_int(row.get('QTN', 0))
208
+ reference = str(row.get('REF.', '')).upper()
209
+
210
+ if reference == 'DEMO' or quantity == 1:
211
+ return 'DEMO_SALE'
212
+ else:
213
+ return 'BULK_SALE'
214
+
215
+ def calculate_payment_status(self, row):
216
+ """Determine payment status intelligently"""
217
+ final_amt = self.safe_float(row.get('FINAL AMT', 0))
218
+ gpay = self.safe_float(row.get('G-PAY', 0))
219
+ cash = self.safe_float(row.get('CASH', 0))
220
+ cheque = self.safe_float(row.get('CHQ', 0))
221
+
222
+ paid_amt = gpay + cash + cheque
223
+
224
+ if paid_amt >= final_amt:
225
+ return 'PAID'
226
+ elif paid_amt > 0:
227
+ return 'PARTIAL_PAID'
228
+ elif self.parse_date(row.get('PAYMENT DATE')) not in ['NOT_AVAILABLE', 'INVALID_DATE']:
229
+ return 'PENDING'
230
+ else:
231
+ return 'UNPAID'
232
+
233
+ def detect_payment_method(self, row):
234
+ """Intelligently detect payment method"""
235
+ gpay = self.safe_float(row.get('G-PAY', 0))
236
+ cash = self.safe_float(row.get('CASH', 0))
237
+ cheque = self.safe_float(row.get('CHQ', 0))
238
+
239
+ if gpay > 0:
240
+ return 'GPAY'
241
+ elif cash > 0:
242
+ return 'CASH'
243
+ elif cheque > 0:
244
+ return 'CHEQUE'
245
+ else:
246
+ return 'NOT_PAID'
247
+
248
+ def process_dataframe(self, df, sheet_name, source_file):
249
+ """Process entire dataframe and standardize all records"""
250
+ standardized_records = []
251
+
252
+ for idx, row in df.iterrows():
253
+ if (pd.isna(row.get('NAME', '')) and
254
+ pd.isna(row.get('PACKING', '')) and
255
+ pd.isna(row.get('INV NO', ''))):
256
+ continue
257
+
258
+ try:
259
+ standardized_record = self.standardize_record(row, sheet_name, source_file)
260
+ standardized_records.append(standardized_record)
261
+ except Exception as e:
262
+ st.error(f"⚠️ Error processing row {idx}: {e}")
263
+ continue
264
+
265
+ return standardized_records
266
+
267
+ def standardize_record(self, row, sheet_name, source_file):
268
+ """Standardize a single record"""
269
+ record = {
270
+ 'source_sheet': sheet_name,
271
+ 'sr_no': self.clean_name(row.get('SR NO.', 'NOT_AVAILABLE')),
272
+ 'customer_name': self.clean_name(row.get('NAME', 'NOT_AVAILABLE')),
273
+ 'village': self.standardize_location(row.get('VILLAGE', 'NOT_AVAILABLE')),
274
+ 'taluka': self.standardize_location(row.get('TALUKA', 'NOT_AVAILABLE')),
275
+ 'district': self.standardize_location(row.get('DISTRICT', 'NOT_AVAILABLE')),
276
+ 'invoice_no': self.clean_name(row.get('INV NO', 'NOT_AVAILABLE')),
277
+ 'reference': self.clean_name(row.get('REF.', 'NOT_AVAILABLE')),
278
+ 'dispatch_date': self.parse_date(row.get('DISPATCH DATE')),
279
+ 'product_type': self.standardize_product(row.get('PACKING', 'NOT_AVAILABLE')),
280
+ 'quantity': self.safe_int(row.get('QTN', 0)),
281
+ 'rate_per_unit': self.safe_float(row.get('RATE', 0)),
282
+ 'amount': self.safe_float(row.get('AMT', 0)),
283
+ 'final_amount': self.safe_float(row.get('FINAL AMT', 0)),
284
+ 'total_liters': self.safe_float(row.get('TOTAL LTR', 0)),
285
+ 'payment_date': self.parse_date(row.get('PAYMENT DATE')),
286
+ 'gpay_amount': self.safe_float(row.get('G-PAY', 0)),
287
+ 'cash_amount': self.safe_float(row.get('CASH', 0)),
288
+ 'cheque_amount': self.safe_float(row.get('CHQ', 0)),
289
+ 'rrn_number': self.clean_name(row.get('RRN', 'NOT_AVAILABLE')),
290
+ 'sold_by': self.clean_name(row.get('BY', 'NOT_AVAILABLE')),
291
+ 'sale_type': self.detect_sale_type(row),
292
+ 'payment_status': self.calculate_payment_status(row),
293
+ 'payment_method': self.detect_payment_method(row),
294
+ 'source_file': os.path.basename(source_file)
295
+ }
296
+
297
+ # Auto-calculate missing amounts
298
+ if record['amount'] == 0 and record['quantity'] > 0 and record['rate_per_unit'] > 0:
299
+ record['amount'] = record['quantity'] * record['rate_per_unit']
300
+
301
+ if record['final_amount'] == 0 and record['amount'] > 0:
302
+ record['final_amount'] = record['amount']
303
+
304
+ return record
305
+
306
    def insert_into_database(self, records):
        """Upsert processed records into the sales table, keyed on
        invoice_no, then rebuild the customers aggregate.

        Args:
            records: list of dicts produced by standardize_record.

        Returns:
            (inserted_count, updated_count) tuple.

        NOTE(review): records whose invoice_no cleaned to
        'NOT_AVAILABLE' all share one key, so every such row after the
        first becomes an UPDATE of the same row — confirm this is the
        intended de-duplication behavior.
        """
        conn = self.db.get_connection()
        cursor = conn.cursor()

        inserted_count = 0
        updated_count = 0

        for record in records:
            try:
                # Check if invoice already exists
                cursor.execute('SELECT id FROM sales WHERE invoice_no = ?', (record['invoice_no'],))
                existing = cursor.fetchone()

                if existing:
                    # Update existing record in place; parameter order
                    # must mirror the SET-clause column order exactly.
                    update_query = '''
                        UPDATE sales SET
                            source_sheet=?, sr_no=?, customer_name=?, village=?, taluka=?, district=?,
                            reference=?, dispatch_date=?, product_type=?, quantity=?, rate_per_unit=?,
                            amount=?, final_amount=?, total_liters=?, payment_date=?, gpay_amount=?,
                            cash_amount=?, cheque_amount=?, rrn_number=?, sold_by=?, sale_type=?,
                            payment_status=?, payment_method=?, source_file=?
                        WHERE invoice_no=?
                    '''
                    cursor.execute(update_query, (
                        record['source_sheet'], record['sr_no'], record['customer_name'],
                        record['village'], record['taluka'], record['district'],
                        record['reference'], record['dispatch_date'], record['product_type'],
                        record['quantity'], record['rate_per_unit'], record['amount'],
                        record['final_amount'], record['total_liters'], record['payment_date'],
                        record['gpay_amount'], record['cash_amount'], record['cheque_amount'],
                        record['rrn_number'], record['sold_by'], record['sale_type'],
                        record['payment_status'], record['payment_method'], record['source_file'],
                        record['invoice_no']
                    ))
                    updated_count += 1
                else:
                    # Insert new record; parameter order must mirror the
                    # column list exactly (25 columns / 25 placeholders).
                    insert_query = '''
                        INSERT INTO sales (
                            source_sheet, sr_no, customer_name, village, taluka, district,
                            invoice_no, reference, dispatch_date, product_type, quantity,
                            rate_per_unit, amount, final_amount, total_liters, payment_date,
                            gpay_amount, cash_amount, cheque_amount, rrn_number, sold_by,
                            sale_type, payment_status, payment_method, source_file
                        ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    '''
                    cursor.execute(insert_query, (
                        record['source_sheet'], record['sr_no'], record['customer_name'],
                        record['village'], record['taluka'], record['district'],
                        record['invoice_no'], record['reference'], record['dispatch_date'],
                        record['product_type'], record['quantity'], record['rate_per_unit'],
                        record['amount'], record['final_amount'], record['total_liters'],
                        record['payment_date'], record['gpay_amount'], record['cash_amount'],
                        record['cheque_amount'], record['rrn_number'], record['sold_by'],
                        record['sale_type'], record['payment_status'], record['payment_method'],
                        record['source_file']
                    ))
                    inserted_count += 1

            except Exception as e:
                # Best-effort import: report the failed invoice and keep going.
                st.error(f"❌ Database error for invoice {record['invoice_no']}: {e}")
                continue

        conn.commit()

        # Update customers table
        self.update_customers_table(conn)

        conn.close()

        return inserted_count, updated_count
379
+
380
    def update_customers_table(self, conn):
        """Rebuild the customers table as an aggregate over sales.

        Full rebuild (DELETE then re-INSERT) so totals always reflect
        the current contents of ``sales``.  Rows without a usable
        customer name are excluded.  The caller owns *conn*; this
        method commits but does not close it.
        """
        cursor = conn.cursor()

        # Clear and rebuild customers table
        cursor.execute('DELETE FROM customers')

        # Insert aggregated customer data; a "customer" is the distinct
        # (name, village, taluka, district) combination.
        cursor.execute('''
            INSERT INTO customers (customer_name, village, taluka, district, total_purchases, total_orders, last_order_date)
            SELECT
                customer_name,
                village,
                taluka,
                district,
                SUM(final_amount) as total_purchases,
                COUNT(*) as total_orders,
                MAX(dispatch_date) as last_order_date
            FROM sales
            WHERE customer_name != 'NOT_AVAILABLE'
            GROUP BY customer_name, village, taluka, district
        ''')

        conn.commit()
404
+
405
    def process_excel_file(self, file_path):
        """Process an Excel workbook end-to-end — called from Streamlit.

        Reads every sheet, standardizes all rows, upserts them into the
        database and renders a summary in the UI.

        Args:
            file_path: path to the .xlsx/.xls file to import.

        Returns:
            True when at least one record was inserted or updated,
            False otherwise (including on any processing error).
        """
        try:
            st.info(f"🔄 Processing: {os.path.basename(file_path)}")

            # Read the Excel file
            xl = pd.ExcelFile(file_path)

            # Process each sheet
            all_records = []

            for sheet_name in xl.sheet_names:
                with st.spinner(f"Processing sheet: {sheet_name}..."):
                    # Read sheet
                    df = pd.read_excel(file_path, sheet_name=sheet_name)

                    # Standardize data
                    standardized_records = self.process_dataframe(df, sheet_name, file_path)
                    all_records.extend(standardized_records)

            if not all_records:
                st.warning("⚠️ No valid records found in the file")
                return False

            # Insert into database
            with st.spinner("Inserting into database..."):
                inserted, updated = self.insert_into_database(all_records)

            # Show results
            if inserted > 0 or updated > 0:
                st.success(f"✅ Processed {len(all_records)} records from {os.path.basename(file_path)}")
                st.success(f"📊 New: {inserted}, Updated: {updated}")

                # Show quick summary
                self.show_import_summary(all_records)
                return True
            else:
                st.warning("⚠️ No records were inserted or updated")
                return False

        except Exception as e:
            # UI boundary: surface the error to the user instead of raising.
            st.error(f"❌ Error processing file: {e}")
            return False
448
+
449
+ def show_import_summary(self, records):
450
+ """Show summary of imported data"""
451
+ if not records:
452
+ return
453
+
454
+ df = pd.DataFrame(records)
455
+
456
+ col1, col2, col3, col4 = st.columns(4)
457
+
458
+ with col1:
459
+ st.metric("Total Records", len(records))
460
+ with col2:
461
+ demo_sales = len(df[df['sale_type'] == 'DEMO_SALE'])
462
+ st.metric("Demo Sales", demo_sales)
463
+ with col3:
464
+ bulk_sales = len(df[df['sale_type'] == 'BULK_SALE'])
465
+ st.metric("Bulk Sales", bulk_sales)
466
+ with col4:
467
+ total_amount = df['final_amount'].sum()
468
+ st.metric("Total Amount", f"₹{total_amount:,.2f}")
469
+
470
+ # Show top products
471
+ st.subheader("📦 Products Imported")
472
+ product_summary = df['product_type'].value_counts().head(5)
473
+ for product, count in product_summary.items():
474
+ st.write(f"- {product}: {count} records")
475
+
476
+ def get_import_stats(self):
477
+ """Get import statistics for dashboard"""
478
+ conn = self.db.get_connection()
479
+
480
+ try:
481
+ # Total records
482
+ total_records = pd.read_sql('SELECT COUNT(*) as count FROM sales', conn)['count'].iloc[0]
483
+
484
+ # Files processed
485
+ files_processed = pd.read_sql('SELECT COUNT(DISTINCT source_file) as count FROM sales', conn)['count'].iloc[0]
486
+
487
+ # Recent imports
488
+ recent_imports = pd.read_sql('''
489
+ SELECT source_file, COUNT(*) as records, MAX(processed_at) as last_import
490
+ FROM sales
491
+ GROUP BY source_file
492
+ ORDER BY last_import DESC
493
+ LIMIT 5
494
+ ''', conn)
495
+
496
+ return {
497
+ 'total_records': total_records,
498
+ 'files_processed': files_processed,
499
+ 'recent_imports': recent_imports.to_dict('records')
500
+ }
501
+ finally:
502
+ conn.close()
sales_manager.py CHANGED
@@ -55,7 +55,7 @@ class EnhancedSalesManager:
55
  # Sales trends
56
  reports['sales_trends'] = pd.read_sql(f'''
57
  SELECT DATE(sale_date) as date, SUM(total_amount) as daily_sales,
58
- SUM(total_liters) as daily_liters, COUNT(*) as transactions
59
  FROM sales
60
  WHERE sale_date BETWEEN '{start_date}' AND '{end_date}'
61
  GROUP BY DATE(sale_date)
@@ -65,7 +65,7 @@ class EnhancedSalesManager:
65
  # Product performance
66
  reports['product_performance'] = pd.read_sql(f'''
67
  SELECT p.packing_type, p.capacity_ltr, SUM(si.quantity) as total_quantity,
68
- SUM(si.amount) as total_revenue, COUNT(DISTINCT s.sale_id) as transactions
69
  FROM sale_items si
70
  JOIN products p ON si.product_id = p.product_id
71
  JOIN sales s ON si.sale_id = s.sale_id
 
55
  # Sales trends
56
  reports['sales_trends'] = pd.read_sql(f'''
57
  SELECT DATE(sale_date) as date, SUM(total_amount) as daily_sales,
58
+ SUM(total_liters) as daily_liters, COUNT(*) as transactions
59
  FROM sales
60
  WHERE sale_date BETWEEN '{start_date}' AND '{end_date}'
61
  GROUP BY DATE(sale_date)
 
65
  # Product performance
66
  reports['product_performance'] = pd.read_sql(f'''
67
  SELECT p.packing_type, p.capacity_ltr, SUM(si.quantity) as total_quantity,
68
+ SUM(si.amount) as total_revenue, COUNT(DISTINCT s.sale_id) as transactions
69
  FROM sale_items si
70
  JOIN products p ON si.product_id = p.product_id
71
  JOIN sales s ON si.sale_id = s.sale_id
utils/__pycache__/__init__.cpython-313.pyc ADDED
Binary file (142 Bytes). View file
 
utils/__pycache__/helpers.cpython-313.pyc ADDED
Binary file (2.58 kB). View file
 
utils/__pycache__/styling.cpython-310.pyc CHANGED
Binary files a/utils/__pycache__/styling.cpython-310.pyc and b/utils/__pycache__/styling.cpython-310.pyc differ
 
utils/__pycache__/styling.cpython-313.pyc ADDED
Binary file (2.34 kB). View file
 
utils/styling.py CHANGED
@@ -5,6 +5,10 @@ def apply_custom_css():
5
  """Apply custom CSS styling"""
6
  st.markdown("""
7
  <style>
 
 
 
 
8
  .main-header {
9
  font-size: 2.5rem;
10
  color: #1f77b4;
 
5
  """Apply custom CSS styling"""
6
  st.markdown("""
7
  <style>
8
+ [data-testid="stSidebarNav"] {
9
+ display: none !important;
10
+ }
11
+
12
  .main-header {
13
  font-size: 2.5rem;
14
  color: #1f77b4;