RathodHarish committed on
Commit
b7685f7
·
verified ·
1 Parent(s): c26f5ce

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +178 -0
app.py CHANGED
@@ -10,10 +10,24 @@ import os
10
  import io
11
  import time
12
  import asyncio
 
13
 
14
  # Configure logging
15
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
16
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
  # Try to import reportlab
18
  try:
19
  from reportlab.lib.pagesizes import letter
@@ -26,6 +40,166 @@ except ImportError:
26
  logging.warning("reportlab module not found. PDF generation disabled.")
27
  reportlab_available = False
28
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
29
  # Summarize logs
30
  def summarize_logs(df):
31
  try:
@@ -398,6 +572,10 @@ async def process_logs(file_obj, lab_site_filter, equipment_type_filter, date_ra
398
  anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
399
  device_cards = generate_device_cards(filtered_df)
400
 
 
 
 
 
401
  elapsed_time = time.time() - start_time
402
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
403
  if elapsed_time > 3:
 
10
  import io
11
  import time
12
  import asyncio
13
+ from simple_salesforce import Salesforce
14
 
15
  # Configure logging
16
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
17
 
18
+ # Salesforce configuration
19
+ try:
20
+ sf = Salesforce(
21
+ username='multi-devicelabopsdashboard@sathkrutha.com',
22
+ password='Team@1234',
23
+ security_token=os.getenv('SF_SECURITY_TOKEN', ''),
24
+ domain='login'
25
+ )
26
+ logging.info("Salesforce connection established")
27
+ except Exception as e:
28
+ logging.error(f"Failed to connect to Salesforce: {str(e)}")
29
+ sf = None
30
+
31
  # Try to import reportlab
32
  try:
33
  from reportlab.lib.pagesizes import letter
 
40
  logging.warning("reportlab module not found. PDF generation disabled.")
41
  reportlab_available = False
42
 
43
+ # Cache picklist values at startup
44
+ def get_picklist_values(field_name):
45
+ if sf is None:
46
+ return []
47
+ try:
48
+ obj_desc = sf.SmartLog__c.describe()
49
+ for field in obj_desc['fields']:
50
+ if field['name'] == field_name:
51
+ return [value['value'] for value in field['picklistValues'] if value['active']]
52
+ return []
53
+ except Exception as e:
54
+ logging.error(f"Failed to fetch picklist values for {field_name}: {str(e)}")
55
+ return []
56
+
57
+ status_values = get_picklist_values('Status__c') or ["Active", "Inactive", "Pending"]
58
+ log_type_values = get_picklist_values('Log_Type__c') or ["Smart Log", "Cell Analysis", "UV Verification"]
59
+ logging.info(f"Valid Status__c values: {status_values}")
60
+ logging.info(f"Valid Log_Type__c values: {log_type_values}")
61
+
62
+ # Map invalid picklist values
63
+ picklist_mapping = {
64
+ 'Status__c': {
65
+ 'normal': 'Active',
66
+ 'error': 'Inactive',
67
+ 'warning': 'Pending',
68
+ 'ok': 'Active',
69
+ 'failed': 'Inactive'
70
+ },
71
+ 'Log_Type__c': {
72
+ 'maint': 'Smart Log',
73
+ 'error': 'Cell Analysis',
74
+ 'ops': 'UV Verification',
75
+ 'maintenance': 'Smart Log',
76
+ 'cell': 'Cell Analysis',
77
+ 'uv': 'UV Verification',
78
+ 'weight log': 'Smart Log'
79
+ }
80
+ }
81
+
82
+ # Cache folder ID for Salesforce reports
83
+ def get_folder_id(folder_name):
84
+ if sf is None:
85
+ return None
86
+ try:
87
+ query = f"SELECT Id FROM Folder WHERE Name = '{folder_name}' AND Type = 'Report'"
88
+ result = sf.query(query)
89
+ if result['totalSize'] > 0:
90
+ folder_id = result['records'][0]['Id']
91
+ logging.info(f"Found folder ID for '{folder_name}': {folder_id}")
92
+ return folder_id
93
+ else:
94
+ logging.error(f"Folder '{folder_name}' not found in Salesforce.")
95
+ return None
96
+ except Exception as e:
97
+ logging.error(f"Failed to fetch folder ID for '{folder_name}': {str(e)}")
98
+ return None
99
+
100
+ LABOPS_REPORTS_FOLDER_ID = get_folder_id('LabOps Reports')
101
+
102
+ # Salesforce report creation
103
+ def create_salesforce_reports(df):
104
+ if sf is None or not LABOPS_REPORTS_FOLDER_ID:
105
+ logging.error("Cannot create Salesforce reports: No connection or folder ID")
106
+ return
107
+ try:
108
+ timestamp = datetime.now().strftime('%Y%m%d_%H%M%S')
109
+ reports = [
110
+ {
111
+ "reportMetadata": {
112
+ "name": f"SmartLog_Usage_Report_{timestamp}",
113
+ "developerName": f"SmartLog_Usage_Report_{timestamp}",
114
+ "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
115
+ "reportFormat": "TABULAR",
116
+ "reportBooleanFilter": None,
117
+ "reportFilters": [],
118
+ "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.Usage_Hours__c"],
119
+ "folderId": LABOPS_REPORTS_FOLDER_ID
120
+ }
121
+ },
122
+ {
123
+ "reportMetadata": {
124
+ "name": f"SmartLog_AMC_Reminders_{timestamp}",
125
+ "developerName": f"SmartLog_AMC_Reminders_{timestamp}",
126
+ "reportType": {"type": "CustomEntity", "value": "SmartLog__c"},
127
+ "reportFormat": "TABULAR",
128
+ "reportBooleanFilter": None,
129
+ "reportFilters": [],
130
+ "detailColumns": ["SmartLog__c.Device_Id__c", "SmartLog__c.AMC_Date__c"],
131
+ "folderId": LABOPS_REPORTS_FOLDER_ID
132
+ }
133
+ }
134
+ ]
135
+ for report in reports:
136
+ sf.restful('analytics/reports', method='POST', json=report)
137
+ logging.info("Salesforce reports created successfully")
138
+ except Exception as e:
139
+ logging.error(f"Failed to create Salesforce reports: {str(e)}")
140
+
141
+ # Save to Salesforce
142
+ def save_to_salesforce(df, reminders_df):
143
+ if sf is None:
144
+ logging.error("No Salesforce connection available")
145
+ return
146
+ try:
147
+ logging.info("Starting Salesforce save operation")
148
+ current_date = datetime.now()
149
+ next_30_days = current_date + timedelta(days=30)
150
+ records = []
151
+ reminder_device_ids = set(reminders_df['device_id']) if not reminders_df.empty else set()
152
+ logging.info(f"Processing {len(df)} records for Salesforce")
153
+
154
+ for idx, row in df.iterrows():
155
+ status = str(row['status']).lower()
156
+ log_type = str(row['log_type']).lower()
157
+ status_mapped = picklist_mapping['Status__c'].get(status, status_values[0] if status_values else 'Active')
158
+ log_type_mapped = picklist_mapping['Log_Type__c'].get(log_type, log_type_values[0] if log_type_values else 'Smart Log')
159
+
160
+ if not status_mapped or not log_type_mapped:
161
+ logging.warning(f"Skipping record {idx}: Invalid status ({status}) or log_type ({log_type})")
162
+ continue
163
+
164
+ amc_date_str = None
165
+ if pd.notna(row['amc_date']):
166
+ try:
167
+ amc_date = pd.to_datetime(row['amc_date']).strftime('%Y-%m-%d')
168
+ amc_date_str = amc_date
169
+ amc_date_dt = datetime.strptime(amc_date, '%Y-%m-%d')
170
+ if status_mapped == "Active" and current_date.date() <= amc_date_dt.date() <= next_30_days.date():
171
+ logging.info(f"AMC Reminder for Device ID {row['device_id']}: {amc_date}")
172
+ except Exception as e:
173
+ logging.warning(f"Invalid AMC date for Device ID {row['device_id']}: {str(e)}")
174
+
175
+ record = {
176
+ 'Device_Id__c': str(row['device_id'])[:50],
177
+ 'Log_Type__c': log_type_mapped,
178
+ 'Status__c': status_mapped,
179
+ 'Timestamp__c': row['timestamp'].isoformat() if pd.notna(row['timestamp']) else None,
180
+ 'Usage_Hours__c': float(row['usage_hours']) if pd.notna(row['usage_hours']) else 0.0,
181
+ 'Downtime__c': float(row['downtime']) if pd.notna(row['downtime']) else 0.0,
182
+ 'AMC_Date__c': amc_date_str
183
+ }
184
+ records.append(record)
185
+
186
+ if records:
187
+ batch_size = 100
188
+ for i in range(0, len(records), batch_size):
189
+ batch = records[i:i + batch_size]
190
+ try:
191
+ result = sf.bulk.SmartLog__c.insert(batch)
192
+ logging.info(f"Saved {len(batch)} records to Salesforce in batch {i//batch_size + 1}")
193
+ for res in result:
194
+ if not res['success']:
195
+ logging.error(f"Failed to save record: {res['errors']}")
196
+ except Exception as e:
197
+ logging.error(f"Failed to save batch {i//batch_size + 1}: {str(e)}")
198
+ else:
199
+ logging.warning("No records to save to Salesforce")
200
+ except Exception as e:
201
+ logging.error(f"Failed to save to Salesforce: {str(e)}")
202
+
203
  # Summarize logs
204
  def summarize_logs(df):
205
  try:
 
572
  anomaly_alerts_chart = create_anomaly_alerts_chart(anomalies_df)
573
  device_cards = generate_device_cards(filtered_df)
574
 
575
+ # Save to Salesforce after all other processing
576
+ save_to_salesforce(filtered_df, reminders_df)
577
+ create_salesforce_reports(filtered_df)
578
+
579
  elapsed_time = time.time() - start_time
580
  logging.info(f"Processing completed in {elapsed_time:.2f} seconds")
581
  if elapsed_time > 3: