neerajkalyank committed on
Commit
6073c8f
·
verified ·
1 Parent(s): d97ea9d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +158 -45
app.py CHANGED
@@ -26,8 +26,12 @@ logger = logging.getLogger(__name__)
26
 
27
  # Load environment variables
28
  load_dotenv()
 
 
29
  HF_API_URL = os.getenv("HF_API_URL", "https://api-inference.huggingface.co/models/google/vit-base-patch16-224")
30
  HF_API_TOKEN = os.getenv("HF_API_TOKEN")
 
 
31
  SF_CLIENT_ID = os.getenv("SF_CLIENT_ID")
32
  SF_CLIENT_SECRET = os.getenv("SF_CLIENT_SECRET")
33
  SF_USERNAME = os.getenv("SF_USERNAME")
@@ -35,7 +39,7 @@ SF_PASSWORD = os.getenv("SF_PASSWORD")
35
  SF_SECURITY_TOKEN = os.getenv("SF_SECURITY_TOKEN")
36
  SF_INSTANCE_URL = os.getenv("SF_INSTANCE_URL", "https://your-salesforce-instance.my.salesforce.com")
37
  if not all([SF_CLIENT_ID, SF_CLIENT_SECRET, SF_USERNAME, SF_PASSWORD, SF_SECURITY_TOKEN, SF_INSTANCE_URL]):
38
- raise ValueError("Missing required Salesforce credentials or instance URL in the .env file.")
39
  SALESFORCE_AUTH_URL = f"{SF_INSTANCE_URL}/services/oauth2/token"
40
  SALESFORCE_API_URL = f"{SF_INSTANCE_URL}/services/data/v52.0/sobjects/"
41
  SALESFORCE_ACCESS_TOKEN = None
@@ -51,27 +55,59 @@ except Exception as e:
51
  logger.error(f"Failed to load image processor from {model_name}: {str(e)}")
52
  raise
53
 
54
- # Disease recommendations
55
  disease_recommendations = {
56
  "Apple Scab": {"treatment": "Use captan or myclobutanil fungicide", "fertilizer": "10-10-10 balanced fertilizer", "symptoms": ["olive-colored spots", "scabby lesions"]},
57
- # ... (rest of the dictionary remains unchanged)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
  "Armillaria Root Rot": {"treatment": "Remove stumps, improve drainage", "fertilizer": "Compost-based organic fertilizer", "symptoms": ["root decay", "mushroom growth"]}
59
  }
60
 
 
61
  critical_diseases = [
62
  "Fire Blight", "Oak Wilt", "Bacterial Leaf Scorch", "Sudden Oak Death", "Verticillium Wilt",
63
  "Pine Wilt", "Dutch Elm Disease", "Citrus Canker", "Phytophthora Root Rot", "Chestnut Blight"
64
  ]
65
 
 
66
  id2label = {i: disease for i, disease in enumerate(disease_recommendations.keys())}
67
 
 
68
  offline_queue = queue.Queue()
69
  is_online = True
70
  RETRY_INTERVAL = 5
71
 
 
72
  prediction_logs = []
73
 
74
  def authenticate_salesforce(max_retries=3, retry_delay=5):
 
75
  global SALESFORCE_ACCESS_TOKEN, SALESFORCE_REFRESH_TOKEN
76
  password_with_token = f"{SF_PASSWORD}{SF_SECURITY_TOKEN}"
77
  payload = {
@@ -96,18 +132,16 @@ def authenticate_salesforce(max_retries=3, retry_delay=5):
96
  if attempt < max_retries - 1:
97
  logger.info(f"Retrying authentication in {retry_delay} seconds... (Attempt {attempt + 2}/{max_retries})")
98
  time.sleep(retry_delay)
 
 
 
99
  except Exception as e:
100
  logger.error(f"Unexpected error during Salesforce authentication: {str(e)}")
101
- logger.warning("Salesforce authentication failed after all retries. Retrying in background...")
102
- threading.Thread(target=retry_authentication_periodically, daemon=True).start()
103
  return False
104
 
105
- def retry_authentication_periodically():
106
- while not SALESFORCE_ACCESS_TOKEN:
107
- time.sleep(60)
108
- authenticate_salesforce()
109
-
110
  def refresh_salesforce_token():
 
111
  global SALESFORCE_ACCESS_TOKEN
112
  if not SALESFORCE_REFRESH_TOKEN:
113
  logger.error("No refresh token available for Salesforce.")
@@ -130,11 +164,13 @@ def refresh_salesforce_token():
130
  return False
131
 
132
  def encrypt_sensitive_data(data):
 
133
  if data and data != "Not provided":
134
  return cipher.encrypt(data.encode()).decode()
135
  return data
136
 
137
  def decrypt_sensitive_data(encrypted_data):
 
138
  if encrypted_data and encrypted_data != "Not provided":
139
  try:
140
  return cipher.decrypt(encrypted_data.encode()).decode()
@@ -143,6 +179,7 @@ def decrypt_sensitive_data(encrypted_data):
143
  return encrypted_data
144
 
145
  def check_network_status():
 
146
  global is_online
147
  try:
148
  response = requests.get("https://www.google.com", timeout=2)
@@ -152,39 +189,74 @@ def check_network_status():
152
  return is_online
153
 
154
  def sync_offline_logs():
 
155
  if not is_online or not SALESFORCE_ACCESS_TOKEN:
156
  logger.warning("Skipping sync: Either offline or no Salesforce access token.")
157
  return
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
158
  for filename in ["offline_logs.json", "offline_updates.json", "offline_cases.json"]:
159
  try:
160
- with open(filename, "r") as f:
161
- records = [json.loads(line) for line in f if line.strip()]
162
- for record in records:
163
- headers = {"Authorization": f"Bearer {SALESFORCE_ACCESS_TOKEN}", "Content-Type": "application/json"}
164
- if filename == "offline_logs.json":
165
- endpoint = f"{SALESFORCE_API_URL}DiagnosisLog__c"
166
- elif filename == "offline_updates.json":
167
- endpoint = f"{SALESFORCE_API_URL}TreeInspection__c/{record['inspection_id']}"
168
- record = record["data"]
169
- elif filename == "offline_cases.json":
170
- endpoint = f"{SALESFORCE_API_URL}Case"
171
- response = requests.post(endpoint, headers=headers, json=record, timeout=10) if filename != "offline_updates.json" else requests.patch(endpoint, headers=headers, json=record, timeout=10)
172
- if response.status_code in (201, 204):
173
- logger.info(f"Synced {filename.split('.')[0]} for inspection {record.get('TreeInspection__c', 'N/A')}")
174
- else:
175
- logger.error(f"Failed to sync {filename}: {response.text}, Status: {response.status_code}")
176
  open(filename, "w").close()
177
- except FileNotFoundError:
178
- logger.info(f"No {filename} file found to sync.")
179
  except Exception as e:
180
- logger.error(f"Error syncing {filename}: {str(e)}")
181
 
182
  def process_offline_queue():
 
183
  while not offline_queue.empty() and is_online and SALESFORCE_ACCESS_TOKEN and HF_API_TOKEN:
184
  request_data = offline_queue.get()
185
  try:
186
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}", "Content-Type": "application/json"}
187
- response = requests.post(HF_API_URL, headers=headers, json=request_data["payload"], timeout=10)
 
 
 
 
 
188
  if response.status_code == 200:
189
  result = response.json()
190
  logger.info(f"Processed offline queue item, response: {result}")
@@ -194,12 +266,15 @@ def process_offline_queue():
194
  create_salesforce_case(request_data["inspection_id"], result)
195
  else:
196
  logger.error(f"Failed to process offline queue item: {response.text}, Status: {response.status_code}")
 
197
  offline_queue.put(request_data)
198
  except Exception as e:
199
  logger.error(f"Error processing offline queue: {str(e)}")
 
200
  offline_queue.put(request_data)
201
 
202
  def log_to_salesforce(inspection_id, payload, response, status):
 
203
  log_data = {
204
  "TreeInspection__c": inspection_id,
205
  "PayloadSent__c": json.dumps(payload),
@@ -229,6 +304,7 @@ def log_to_salesforce(inspection_id, payload, response, status):
229
  f.write("\n")
230
 
231
  def update_salesforce_record(inspection_id, response):
 
232
  disease = response.get("disease_prediction")
233
  if not disease or not isinstance(disease, str):
234
  logger.warning(f"Invalid or missing disease_prediction in response: {response}. Defaulting to 'Unknown'.")
@@ -269,6 +345,7 @@ def update_salesforce_record(inspection_id, response):
269
  f.write("\n")
270
 
271
  def create_salesforce_case(inspection_id, response):
 
272
  disease = response.get("disease_prediction", "Unknown")
273
  if not isinstance(disease, str) or disease not in disease_recommendations:
274
  logger.warning(f"Invalid disease for case creation: {disease}. Using 'Unknown'.")
@@ -302,6 +379,7 @@ def create_salesforce_case(inspection_id, response):
302
  f.write("\n")
303
 
304
  def create_salesforce_inspection_record(tree_type, location, date, region, officer, field_team):
 
305
  inspection_id = str(uuid.uuid4())
306
  record_data = {
307
  "TreeName__c": tree_type or "Not provided",
@@ -325,11 +403,12 @@ def create_salesforce_inspection_record(tree_type, location, date, region, offic
325
  with open("offline_updates.json", "a") as f:
326
  json.dump({"inspection_id": inspection_id, "data": record_data}, f)
327
  f.write("\n")
 
328
  else:
329
  with open("offline_updates.json", "a") as f:
330
  json.dump({"inspection_id": inspection_id, "data": record_data}, f)
331
  f.write("\n")
332
- return inspection_id
333
  except Exception as e:
334
  logger.error(f"Error creating Salesforce inspection record: {str(e)}")
335
  with open("offline_updates.json", "a") as f:
@@ -338,6 +417,7 @@ def create_salesforce_inspection_record(tree_type, location, date, region, offic
338
  return inspection_id
339
 
340
  def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, location, date, region, officer, field_team):
 
341
  try:
342
  if not HF_API_TOKEN:
343
  logger.warning("Hugging Face API token is missing. Skipping API call and queuing data.")
@@ -361,6 +441,8 @@ def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, locatio
361
 
362
  payload = {"image": image_base64}
363
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}", "Content-Type": "application/json"}
 
 
364
  max_retries = 3
365
  for attempt in range(max_retries):
366
  try:
@@ -368,7 +450,7 @@ def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, locatio
368
  response.raise_for_status()
369
  break
370
  except requests.exceptions.RequestException as e:
371
- logger.error(f"Hugging Face API call failed (Attempt {attempt + 1}/{max_retries}): {str(e)}")
372
  if attempt == max_retries - 1:
373
  log_to_salesforce(inspection_id, payload, str(e), "Failed")
374
  return None
@@ -376,15 +458,17 @@ def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, locatio
376
 
377
  try:
378
  hf_response = response.json()
 
379
  if "disease_prediction" not in hf_response or "confidence" not in hf_response:
380
- logger.error(f"Invalid Hugging Face response format: {hf_response}")
381
  log_to_salesforce(inspection_id, payload, "Invalid response format", "Failed")
382
  return None
383
  except json.JSONDecodeError as e:
384
- logger.error(f"Failed to decode Hugging Face response: {str(e)}")
385
  log_to_salesforce(inspection_id, payload, str(e), "Failed")
386
  return None
387
 
 
388
  disease = hf_response.get("disease_prediction")
389
  if not disease or not isinstance(disease, str):
390
  logger.warning(f"Invalid or missing disease_prediction in response: {hf_response}. Defaulting to 'Unknown'.")
@@ -426,13 +510,13 @@ def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, locatio
426
  }
427
  log_response = requests.post(f"{SALESFORCE_API_URL}DiagnosisLog__c", headers=headers, json=log_data, timeout=10)
428
  if log_response.status_code != 201:
429
- logger.error(f"Failed to log to DiagnosisLog__c: {log_response.text}")
430
  if disease in critical_diseases or confidence < 0.6:
431
  create_salesforce_case(updated_id, {"disease_prediction": disease, "confidence": confidence})
432
  return updated_id
433
  else:
434
  error_msg = create_response.text
435
- logger.error(f"Failed to save to Salesforce: {error_msg}")
436
  log_to_salesforce(inspection_id, payload, error_msg, "Failed")
437
  return None
438
  else:
@@ -457,12 +541,15 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
457
  start_time = time.time()
458
  inspection_id = create_salesforce_inspection_record(tree_type, location, date, region, officer, field_team)
459
 
 
460
  location_enc = encrypt_sensitive_data(location)
461
  officer_enc = encrypt_sensitive_data(officer)
462
 
 
463
  if image is None and not base64_image:
464
  return ("Error: No image or Base64 string provided.", None, None, None, None, None, None, None, None)
465
 
 
466
  if base64_image:
467
  try:
468
  image_data = base64.b64decode(base64_image)
@@ -472,6 +559,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
472
  elif image is None:
473
  return ("Error: No valid image provided.", None, None, None, None, None, None, None, None)
474
 
 
475
  image = image.convert("RGB")
476
  img_array = np.array(image)
477
  if img_array.ndim != 3 or img_array.shape[2] != 3:
@@ -480,6 +568,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
480
  if image.size[0] < 224 or image.size[1] < 224:
481
  return ("Error: Image resolution too low. Minimum 224x224 required.", None, None, None, None, None, None, None, None)
482
 
 
483
  inputs = image_processor(images=image, return_tensors="pt")
484
  pixel_values = inputs["pixel_values"].squeeze(0).numpy()
485
  pixel_values = ((pixel_values - pixel_values.min()) * 255 / (pixel_values.max() - pixel_values.min())).astype(np.uint8)
@@ -489,13 +578,16 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
489
  image_base64 = base64.b64encode(buffered.getvalue()).decode("utf-8")
490
  payload = {"image": image_base64}
491
 
 
492
  logger.info(f"Original Image shape: {img_array.shape}")
493
  logger.info(f"Preprocessed Pixel Values shape: {pixel_values.shape}")
494
 
 
495
  updated_id = integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, location, date, region, officer, field_team)
496
  if not updated_id:
497
  return ("Error: Failed to integrate with Hugging Face and save to Salesforce. Check logs for details.", None, None, None, None, None, None, None, None)
498
 
 
499
  if is_online and SALESFORCE_ACCESS_TOKEN:
500
  headers = {"Authorization": f"Bearer {SALESFORCE_ACCESS_TOKEN}", "Content-Type": "application/json"}
501
  get_response = requests.get(f"{SALESFORCE_API_URL}TreeInspection__c/{updated_id}", headers=headers, timeout=10)
@@ -506,7 +598,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
506
  treatment = record.get("TreatmentPlan__c", "")
507
  fertilizer = record.get("Fertilizer__c", "")
508
  else:
509
- logger.error(f"Failed to retrieve Salesforce record: {get_response.text}")
510
  predicted_label = "Unknown"
511
  confidence_score = 0.0
512
  treatment = ""
@@ -517,10 +609,12 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
517
  treatment = ""
518
  fertilizer = ""
519
 
 
520
  buffered_output = BytesIO()
521
  image.save(buffered_output, format="JPEG")
522
  output_base64 = base64.b64encode(buffered_output.getvalue()).decode("utf-8")
523
 
 
524
  metadata_summary = (
525
  f"Tree Type: {tree_type or 'Not provided'}\n"
526
  f"Location: {decrypt_sensitive_data(location_enc) or 'Not provided'}\n"
@@ -530,6 +624,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
530
  f"Field Team: {field_team or 'Not provided'}"
531
  )
532
 
 
533
  alert_status = "No alert triggered."
534
  logger.info(f"Checking alert: predicted_label={predicted_label}, confidence_score={confidence_score}")
535
  if predicted_label in critical_diseases or confidence_score < 0.6:
@@ -550,6 +645,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
550
  prediction_logs.append(case_details)
551
  logger.info(f"Added to prediction_logs: {case_details}")
552
 
 
553
  if not prediction_logs:
554
  prediction_logs.append({
555
  "case_id": str(uuid.uuid4()),
@@ -565,6 +661,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
565
  })
566
  logger.info("Added default entry to prediction_logs")
567
 
 
568
  try:
569
  with open("prediction_logs.json", "w") as f:
570
  json.dump(prediction_logs, f, indent=2)
@@ -572,6 +669,7 @@ def predict_tree_disease(image=None, base64_image="", tree_type="", location="",
572
  except Exception as e:
573
  logger.error(f"Error saving prediction log: {str(e)}")
574
 
 
575
  response_time = time.time() - start_time
576
  if response_time > 5:
577
  logger.warning(f"Response time {response_time:.2f} seconds exceeds 5-second requirement.")
@@ -595,29 +693,35 @@ def generate_report():
595
  if not prediction_logs:
596
  logger.info("No data available for reporting.")
597
  return "Tree Health Report:\n\nNo data available for reporting."
 
598
  regions = {}
599
  diseases = {}
600
  confidences = []
601
  fertilizers = {}
602
  critical_alerts = 0
 
603
  for log in prediction_logs:
604
  region = log["region"]
605
  disease = log["disease"]
606
  confidence = log["confidence"]
607
  fertilizer = disease_recommendations.get(disease, {}).get("fertilizer", "Unknown")
 
608
  regions[region] = regions.get(region, 0) + 1
609
  diseases[disease] = diseases.get(disease, 0) + 1
610
  confidences.append(confidence)
611
  fertilizers[fertilizer] = fertilizers.get(fertilizer, 0) + 1
612
  if disease in critical_diseases or confidence < 0.6:
613
  critical_alerts += 1
 
614
  report = "Tree Health Report:\n\n"
615
  report += "1. Tree Inspection Volume by Region (Last 30 Days):\n"
616
  for region, count in regions.items():
617
  report += f" - {region}: {count} inspections\n"
 
618
  report += "\n2. Most Frequent Diseases (Last 90 Days):\n"
619
  for disease, count in sorted(diseases.items(), key=lambda x: x[1], reverse=True)[:5]:
620
  report += f" - {disease}: {count} cases\n"
 
621
  report += "\n3. Confidence Distribution (Last 30 Days):\n"
622
  confidence_bins = {"0.5-0.7": 0, "0.7-0.9": 0, "0.9-1.0": 0}
623
  for c in confidences:
@@ -629,9 +733,11 @@ def generate_report():
629
  confidence_bins["0.9-1.0"] += 1
630
  for bin_range, count in confidence_bins.items():
631
  report += f" - {bin_range}: {count} cases\n"
 
632
  report += "\n4. Fertilizer Usage (Last 60 Days):\n"
633
  for fertilizer, count in sorted(fertilizers.items(), key=lambda x: x[1], reverse=True):
634
  report += f" - {fertilizer}: {count} recommendations\n"
 
635
  report += f"\n5. Critical Disease Alerts: {critical_alerts} alerts\n"
636
  logger.info(f"Generated report with {len(regions)} regions, {len(diseases)} diseases")
637
  return report
@@ -640,27 +746,33 @@ def render_dashboard(region_filter="", officer_filter="", field_team_filter=""):
640
  if not prediction_logs:
641
  logger.info("No data available for dashboard.")
642
  return "Tree Health Intelligence Dashboard:\n\nNo data available for dashboard."
 
643
  filtered_logs = [log for log in prediction_logs if
644
  (not region_filter or log["region"] == region_filter) and
645
  (not officer_filter or log["officer"] == decrypt_sensitive_data(officer_filter)) and
646
  (not field_team_filter or log["field_team"] == field_team_filter)]
 
647
  if not filtered_logs:
648
  logger.info("No data matches the selected filters.")
649
  return "Tree Health Intelligence Dashboard:\n\nNo data matches the selected filters."
 
650
  dashboard = "Tree Health Intelligence Dashboard:\n\n"
651
  dashboard += f"Total Entries: {len(filtered_logs)}\n"
 
652
  regions = {}
653
  for log in filtered_logs:
654
  regions[log["region"]] = regions.get(log["region"], 0) + 1
655
  dashboard += "\nRegional Activity:\n"
656
  for region, count in regions.items():
657
  dashboard += f" - {region}: {count} inspections\n"
 
658
  diseases = {}
659
  for log in filtered_logs:
660
  diseases[log["disease"]] = diseases.get(log["disease"], 0) + 1
661
  dashboard += "\nDisease Patterns:\n"
662
  for disease, count in sorted(diseases.items(), key=lambda x: x[1], reverse=True):
663
  dashboard += f" - {disease}: {count} cases\n"
 
664
  confidence_bins = {"0.5-0.7": 0, "0.7-0.9": 0, "0.9-1.0": 0}
665
  for log in filtered_logs:
666
  c = log["confidence"]
@@ -673,6 +785,7 @@ def render_dashboard(region_filter="", officer_filter="", field_team_filter=""):
673
  dashboard += "\nModel Confidence Spread:\n"
674
  for bin_range, count in confidence_bins.items():
675
  dashboard += f" - {bin_range}: {count} cases\n"
 
676
  fertilizers = {}
677
  for log in filtered_logs:
678
  fertilizer = disease_recommendations.get(log["disease"], {}).get("fertilizer", "Unknown")
@@ -680,29 +793,29 @@ def render_dashboard(region_filter="", officer_filter="", field_team_filter=""):
680
  dashboard += "\nFertilizer Insights:\n"
681
  for fertilizer, count in sorted(fertilizers.items(), key=lambda x: x[1], reverse=True):
682
  dashboard += f" - {fertilizer}: {count} recommendations\n"
 
683
  critical_cases = [log for log in filtered_logs if log["disease"] in critical_diseases or log["confidence"] < 0.6]
684
  dashboard += f"\nCritical Disease Alerts: {len(critical_cases)} alerts\n"
685
  for log in critical_cases[:5]:
686
  dashboard += f" - {log['disease']} (Confidence: {log['confidence']:.2f}) at {log['timestamp']}\n"
 
687
  logger.info(f"Generated dashboard with {len(filtered_logs)} entries")
688
  return dashboard
689
 
690
  def offline_queue_processor():
 
691
  while True:
692
  if check_network_status() and SALESFORCE_ACCESS_TOKEN and HF_API_TOKEN:
693
  sync_offline_logs()
694
  process_offline_queue()
695
  time.sleep(RETRY_INTERVAL)
696
 
697
- def check_and_sync():
698
- if check_network_status() and authenticate_salesforce():
699
- sync_offline_logs()
700
- process_offline_queue()
701
-
702
  if not authenticate_salesforce():
703
- logger.warning("Salesforce authentication failed initially. Retrying in background.")
704
- threading.Thread(target=retry_authentication_periodically, daemon=True).start()
705
- threading.Thread(target=lambda: [check_and_sync(), time.sleep(60)], daemon=True).start()
 
706
 
707
  iface = gr.Interface(
708
  fn=predict_tree_disease,
 
26
 
27
  # Load environment variables
28
  load_dotenv()
29
+
30
+ # Configuration with validation
31
  HF_API_URL = os.getenv("HF_API_URL", "https://api-inference.huggingface.co/models/google/vit-base-patch16-224")
32
  HF_API_TOKEN = os.getenv("HF_API_TOKEN")
33
+ if not HF_API_TOKEN:
34
+ logger.warning("HF_API_TOKEN is missing. Hugging Face API calls will be skipped, running in offline mode.")
35
  SF_CLIENT_ID = os.getenv("SF_CLIENT_ID")
36
  SF_CLIENT_SECRET = os.getenv("SF_CLIENT_SECRET")
37
  SF_USERNAME = os.getenv("SF_USERNAME")
 
39
  SF_SECURITY_TOKEN = os.getenv("SF_SECURITY_TOKEN")
40
  SF_INSTANCE_URL = os.getenv("SF_INSTANCE_URL", "https://your-salesforce-instance.my.salesforce.com")
41
  if not all([SF_CLIENT_ID, SF_CLIENT_SECRET, SF_USERNAME, SF_PASSWORD, SF_SECURITY_TOKEN, SF_INSTANCE_URL]):
42
+ logger.warning("Missing Salesforce credentials. Salesforce integration will be queued for offline processing.")
43
  SALESFORCE_AUTH_URL = f"{SF_INSTANCE_URL}/services/oauth2/token"
44
  SALESFORCE_API_URL = f"{SF_INSTANCE_URL}/services/data/v52.0/sobjects/"
45
  SALESFORCE_ACCESS_TOKEN = None
 
55
  logger.error(f"Failed to load image processor from {model_name}: {str(e)}")
56
  raise
57
 
58
+ # Disease recommendations (aligned with Salesforce picklist values)
59
  disease_recommendations = {
60
  "Apple Scab": {"treatment": "Use captan or myclobutanil fungicide", "fertilizer": "10-10-10 balanced fertilizer", "symptoms": ["olive-colored spots", "scabby lesions"]},
61
+ "Anthracnose": {"treatment": "Prune infected leaves, copper fungicide", "fertilizer": "Nitrogen-rich fertilizer", "symptoms": ["leaf spots", "wilting"]},
62
+ "Bacterial Leaf Scorch": {"treatment": "Remove infected branches, apply bactericide", "fertilizer": "Low-nitrogen organic fertilizer", "symptoms": ["scorched margins", "yellowing"]},
63
+ "Beech Bark Disease": {"treatment": "Remove infected trees, insecticide for scale", "fertilizer": "Slow-release fertilizer", "symptoms": ["bark lesions", "scale infestation"]},
64
+ "Cedar Apple Rust": {"treatment": "Fungicide in early spring", "fertilizer": "Balanced NPK fertilizer", "symptoms": ["yellow spots", "orange galls"]},
65
+ "Chestnut Blight": {"treatment": "Prune affected limbs, biological control", "fertilizer": "Slow-release fertilizer in spring", "symptoms": ["cankers", "wilting"]},
66
+ "Citrus Canker": {"treatment": "Copper-based sprays, remove infected trees", "fertilizer": "Citrus-specific fertilizer", "symptoms": ["corky lesions", "water spots"]},
67
+ "Dutch Elm Disease": {"treatment": "Systemic fungicides, prune infected limbs", "fertilizer": "Low-nitrogen fertilizer", "symptoms": ["wilting", "yellow leaves"]},
68
+ "Dogwood Anthracnose": {"treatment": "Fungicide, remove infected tissue", "fertilizer": "Balanced fertilizer in early spring", "symptoms": ["leaf spots", "dieback"]},
69
+ "Fire Blight": {"treatment": "Prune during dormancy, copper sprays", "fertilizer": "Avoid excess nitrogen", "symptoms": ["blackened leaves", "scorched shoots"]},
70
+ "Fusarium Wilt": {"treatment": "Remove infected plants, soil solarization", "fertilizer": "Potassium-rich fertilizer", "symptoms": ["wilting", "yellowing"]},
71
+ "Gummosis": {"treatment": "Improve drainage, copper sprays", "fertilizer": "Citrus or stone fruit fertilizer", "symptoms": ["gummy exudates", "bark cracks"]},
72
+ "Huanglongbing (Citrus Greening)": {"treatment": "Remove infected trees, insecticide", "fertilizer": "Micronutrient mix for citrus", "symptoms": ["yellow mottling", "asymmetric fruit"]},
73
+ "Leaf Spot": {"treatment": "Fungicide sprays, remove debris", "fertilizer": "General-purpose fertilizer", "symptoms": ["small spots", "yellow halos"]},
74
+ "Oak Wilt": {"treatment": "Trenching, systemic fungicides", "fertilizer": "Minimal fertilization, compost mulch", "symptoms": ["vein browning", "leaf wilt"]},
75
+ "Pine Wilt": {"treatment": "Remove infested trees, control beetles", "fertilizer": "No fertilization during stress", "symptoms": ["needle browning", "tree decline"]},
76
+ "Powdery Mildew": {"treatment": "Sulfur-based sprays", "fertilizer": "Avoid high-nitrogen fertilizers", "symptoms": ["white powdery coating", "white patches"]},
77
+ "Phytophthora Root Rot": {"treatment": "Improve soil drainage, fungicide", "fertilizer": "Organic compost, no excess water", "symptoms": ["root decay", "wilting"]},
78
+ "Sooty Mold": {"treatment": "Control insect pests, wash off mold", "fertilizer": "Supportive fertilization (10-10-10)", "symptoms": ["black mold", "sticky residue"]},
79
+ "Sudden Oak Death": {"treatment": "Phosphonates, sanitation", "fertilizer": "Organic mulch, avoid excess nitrogen", "symptoms": ["cankers", "leaf loss"]},
80
+ "Verticillium Wilt": {"treatment": "Remove infected branches, crop rotation", "fertilizer": "Low-nitrogen balanced fertilizer", "symptoms": ["sudden wilting", "branch dieback"]},
81
+ "Thousand Cankers Disease": {"treatment": "Remove infected trees", "fertilizer": "Mulch and deep water during drought", "symptoms": ["cankers", "wilting"]},
82
+ "Tar Spot": {"treatment": "Rake and destroy leaves, fungicide", "fertilizer": "General-purpose fertilizer", "symptoms": ["black tar spots", "shiny spots"]},
83
+ "Rusts": {"treatment": "Fungicides during early infection", "fertilizer": "Balanced fertilizer to boost immunity", "symptoms": ["orange pustules", "rusty spots"]},
84
+ "Needle Cast": {"treatment": "Fungicides (chlorothalonil), remove infected needles", "fertilizer": "Conifer-specific fertilizer", "symptoms": ["needle spots", "yellow bands"]},
85
+ "White Pine Blister Rust": {"treatment": "Remove alternate hosts, fungicides", "fertilizer": "Minimal fertilization", "symptoms": ["blister-like swellings", "needle drop"]},
86
+ "Root Rot": {"treatment": "Improve drainage, avoid overwatering", "fertilizer": "Compost and balanced fertilizer", "symptoms": ["root decay", "stunted growth"]},
87
+ "Black Knot": {"treatment": "Prune 2-4 inches below knots", "fertilizer": "Spring fertilization with 10-10-10", "symptoms": ["black swellings", "rough knots"]},
88
+ "Botryosphaeria Canker": {"treatment": "Remove infected branches", "fertilizer": "Avoid over-fertilization", "symptoms": ["cankers", "discolored bark"]},
89
  "Armillaria Root Rot": {"treatment": "Remove stumps, improve drainage", "fertilizer": "Compost-based organic fertilizer", "symptoms": ["root decay", "mushroom growth"]}
90
  }
91
 
92
+ # Critical diseases
93
  critical_diseases = [
94
  "Fire Blight", "Oak Wilt", "Bacterial Leaf Scorch", "Sudden Oak Death", "Verticillium Wilt",
95
  "Pine Wilt", "Dutch Elm Disease", "Citrus Canker", "Phytophthora Root Rot", "Chestnut Blight"
96
  ]
97
 
98
+ # id2label mapping
99
  id2label = {i: disease for i, disease in enumerate(disease_recommendations.keys())}
100
 
101
+ # Offline queue for API calls
102
  offline_queue = queue.Queue()
103
  is_online = True
104
  RETRY_INTERVAL = 5
105
 
106
+ # In-memory storage for prediction data
107
  prediction_logs = []
108
 
109
  def authenticate_salesforce(max_retries=3, retry_delay=5):
110
+ """Authenticate with Salesforce with retry logic."""
111
  global SALESFORCE_ACCESS_TOKEN, SALESFORCE_REFRESH_TOKEN
112
  password_with_token = f"{SF_PASSWORD}{SF_SECURITY_TOKEN}"
113
  payload = {
 
132
  if attempt < max_retries - 1:
133
  logger.info(f"Retrying authentication in {retry_delay} seconds... (Attempt {attempt + 2}/{max_retries})")
134
  time.sleep(retry_delay)
135
+ else:
136
+ logger.error("Salesforce authentication failed after all retries.")
137
+ return False
138
  except Exception as e:
139
  logger.error(f"Unexpected error during Salesforce authentication: {str(e)}")
140
+ return False
 
141
  return False
142
 
 
 
 
 
 
143
  def refresh_salesforce_token():
144
+ """Refresh Salesforce access token."""
145
  global SALESFORCE_ACCESS_TOKEN
146
  if not SALESFORCE_REFRESH_TOKEN:
147
  logger.error("No refresh token available for Salesforce.")
 
164
  return False
165
 
166
  def encrypt_sensitive_data(data):
167
+ """Encrypt sensitive data."""
168
  if data and data != "Not provided":
169
  return cipher.encrypt(data.encode()).decode()
170
  return data
171
 
172
  def decrypt_sensitive_data(encrypted_data):
173
+ """Decrypt sensitive data."""
174
  if encrypted_data and encrypted_data != "Not provided":
175
  try:
176
  return cipher.decrypt(encrypted_data.encode()).decode()
 
179
  return encrypted_data
180
 
181
  def check_network_status():
182
+ """Check network status."""
183
  global is_online
184
  try:
185
  response = requests.get("https://www.google.com", timeout=2)
 
189
  return is_online
190
 
191
def _replay_offline_records(filename, kind, send_record, success_status, record_id):
    """Replay every JSON-lines record stored in *filename* against Salesforce.

    filename       -- local JSON-lines queue file written while offline
    kind           -- short label used in the log messages ("log"/"update"/"case")
    send_record    -- callable(record) -> requests.Response performing the API call
    success_status -- HTTP status code that counts as a successful sync
    record_id      -- callable(record) -> identifier shown in the success log line
    """
    try:
        with open(filename, "r") as f:
            records = [json.loads(line) for line in f if line.strip()]
        for record in records:
            response = send_record(record)
            if response.status_code == success_status:
                logger.info(f"Synced offline {kind} for inspection {record_id(record)}")
            else:
                logger.error(f"Failed to sync offline {kind}: {response.text}, Status: {response.status_code}")
    except FileNotFoundError:
        # Nothing was queued for this category — not an error.
        logger.info(f"No {filename} file found to sync.")
    except Exception as e:
        logger.error(f"Error syncing offline {kind}s: {str(e)}")


def sync_offline_logs():
    """Sync offline logs, updates, and cases to Salesforce.

    Reads the three local JSON-lines queue files (diagnosis logs, record
    updates, follow-up cases), replays each record against the Salesforce
    REST API, then truncates the files. No-op when offline or when no
    access token is available.
    """
    if not is_online or not SALESFORCE_ACCESS_TOKEN:
        logger.warning("Skipping sync: Either offline or no Salesforce access token.")
        return

    # Headers are identical for every request in this pass; build them once.
    headers = {"Authorization": f"Bearer {SALESFORCE_ACCESS_TOKEN}", "Content-Type": "application/json"}

    _replay_offline_records(
        "offline_logs.json", "log",
        lambda log: requests.post(f"{SALESFORCE_API_URL}DiagnosisLog__c", headers=headers, json=log, timeout=10),
        201,
        lambda log: log["TreeInspection__c"],
    )
    _replay_offline_records(
        "offline_updates.json", "update",
        lambda u: requests.patch(f"{SALESFORCE_API_URL}TreeInspection__c/{u['inspection_id']}", headers=headers, json=u["data"], timeout=10),
        204,  # PATCH returns 204 No Content on success
        lambda u: u["inspection_id"],
    )
    _replay_offline_records(
        "offline_cases.json", "case",
        lambda c: requests.post(f"{SALESFORCE_API_URL}Case", headers=headers, json=c, timeout=10),
        201,
        lambda c: c["TreeInspection__c"],
    )

    # Clear files after the sync pass.
    # NOTE(review): files are truncated even when individual records failed
    # above — this matches the original behavior, but failed records are lost.
    for filename in ["offline_logs.json", "offline_updates.json", "offline_cases.json"]:
        try:
            open(filename, "w").close()
        except Exception as e:
            # Bug fix: the original logged the literal text "(unknown)"
            # instead of the filename that failed to clear.
            logger.error(f"Failed to clear {filename}: {str(e)}")
247
 
248
  def process_offline_queue():
249
+ """Process queued API calls when online."""
250
  while not offline_queue.empty() and is_online and SALESFORCE_ACCESS_TOKEN and HF_API_TOKEN:
251
  request_data = offline_queue.get()
252
  try:
253
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}", "Content-Type": "application/json"}
254
+ response = requests.post(
255
+ HF_API_URL,
256
+ headers=headers,
257
+ json=request_data["payload"],
258
+ timeout=10
259
+ )
260
  if response.status_code == 200:
261
  result = response.json()
262
  logger.info(f"Processed offline queue item, response: {result}")
 
266
  create_salesforce_case(request_data["inspection_id"], result)
267
  else:
268
  logger.error(f"Failed to process offline queue item: {response.text}, Status: {response.status_code}")
269
+ log_to_salesforce(request_data["inspection_id"], request_data["payload"], response.text, "Failed")
270
  offline_queue.put(request_data)
271
  except Exception as e:
272
  logger.error(f"Error processing offline queue: {str(e)}")
273
+ log_to_salesforce(request_data["inspection_id"], request_data["payload"], str(e), "Failed")
274
  offline_queue.put(request_data)
275
 
276
  def log_to_salesforce(inspection_id, payload, response, status):
277
+ """Log request and response to Salesforce DiagnosisLog__c."""
278
  log_data = {
279
  "TreeInspection__c": inspection_id,
280
  "PayloadSent__c": json.dumps(payload),
 
304
  f.write("\n")
305
 
306
  def update_salesforce_record(inspection_id, response):
307
+ """Update TreeInspection__c record in Salesforce with Hugging Face prediction data."""
308
  disease = response.get("disease_prediction")
309
  if not disease or not isinstance(disease, str):
310
  logger.warning(f"Invalid or missing disease_prediction in response: {response}. Defaulting to 'Unknown'.")
 
345
  f.write("\n")
346
 
347
  def create_salesforce_case(inspection_id, response):
348
+ """Create a follow-up case in Salesforce."""
349
  disease = response.get("disease_prediction", "Unknown")
350
  if not isinstance(disease, str) or disease not in disease_recommendations:
351
  logger.warning(f"Invalid disease for case creation: {disease}. Using 'Unknown'.")
 
379
  f.write("\n")
380
 
381
  def create_salesforce_inspection_record(tree_type, location, date, region, officer, field_team):
382
+ """Create a new TreeInspection__c record in Salesforce."""
383
  inspection_id = str(uuid.uuid4())
384
  record_data = {
385
  "TreeName__c": tree_type or "Not provided",
 
403
  with open("offline_updates.json", "a") as f:
404
  json.dump({"inspection_id": inspection_id, "data": record_data}, f)
405
  f.write("\n")
406
+ return inspection_id
407
  else:
408
  with open("offline_updates.json", "a") as f:
409
  json.dump({"inspection_id": inspection_id, "data": record_data}, f)
410
  f.write("\n")
411
+ return inspection_id
412
  except Exception as e:
413
  logger.error(f"Error creating Salesforce inspection record: {str(e)}")
414
  with open("offline_updates.json", "a") as f:
 
417
  return inspection_id
418
 
419
  def integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, location, date, region, officer, field_team):
420
+ """Integrate Hugging Face prediction with Salesforce by saving records."""
421
  try:
422
  if not HF_API_TOKEN:
423
  logger.warning("Hugging Face API token is missing. Skipping API call and queuing data.")
 
441
 
442
  payload = {"image": image_base64}
443
  headers = {"Authorization": f"Bearer {HF_API_TOKEN}", "Content-Type": "application/json"}
444
+
445
+ # Call Hugging Face API with retry logic
446
  max_retries = 3
447
  for attempt in range(max_retries):
448
  try:
 
450
  response.raise_for_status()
451
  break
452
  except requests.exceptions.RequestException as e:
453
+ logger.error(f"Hugging Face API call failed (Attempt {attempt + 1}/{max_retries}): {str(e)}, Response: {getattr(e.response, 'text', 'No response')}")
454
  if attempt == max_retries - 1:
455
  log_to_salesforce(inspection_id, payload, str(e), "Failed")
456
  return None
 
458
 
459
  try:
460
  hf_response = response.json()
461
+ logger.info(f"Hugging Face response for inspection {inspection_id}: {hf_response}")
462
  if "disease_prediction" not in hf_response or "confidence" not in hf_response:
463
+ logger.error(f"Invalid Hugging Face response format: {hf_response}. Expected 'disease_prediction' and 'confidence'.")
464
  log_to_salesforce(inspection_id, payload, "Invalid response format", "Failed")
465
  return None
466
  except json.JSONDecodeError as e:
467
+ logger.error(f"Failed to decode Hugging Face response: {str(e)}, Raw response: {response.text}")
468
  log_to_salesforce(inspection_id, payload, str(e), "Failed")
469
  return None
470
 
471
+ # Validate and normalize disease name
472
  disease = hf_response.get("disease_prediction")
473
  if not disease or not isinstance(disease, str):
474
  logger.warning(f"Invalid or missing disease_prediction in response: {hf_response}. Defaulting to 'Unknown'.")
 
510
  }
511
  log_response = requests.post(f"{SALESFORCE_API_URL}DiagnosisLog__c", headers=headers, json=log_data, timeout=10)
512
  if log_response.status_code != 201:
513
+ logger.error(f"Failed to log to DiagnosisLog__c: {log_response.text}, Status: {log_response.status_code}")
514
  if disease in critical_diseases or confidence < 0.6:
515
  create_salesforce_case(updated_id, {"disease_prediction": disease, "confidence": confidence})
516
  return updated_id
517
  else:
518
  error_msg = create_response.text
519
+ logger.error(f"Failed to save to Salesforce: {error_msg}, Status: {create_response.status_code}")
520
  log_to_salesforce(inspection_id, payload, error_msg, "Failed")
521
  return None
522
  else:
 
541
  start_time = time.time()
542
  inspection_id = create_salesforce_inspection_record(tree_type, location, date, region, officer, field_team)
543
 
544
+ # Encrypt sensitive data
545
  location_enc = encrypt_sensitive_data(location)
546
  officer_enc = encrypt_sensitive_data(officer)
547
 
548
+ # Validate input
549
  if image is None and not base64_image:
550
  return ("Error: No image or Base64 string provided.", None, None, None, None, None, None, None, None)
551
 
552
+ # Convert Base64 to PIL image if provided
553
  if base64_image:
554
  try:
555
  image_data = base64.b64decode(base64_image)
 
559
  elif image is None:
560
  return ("Error: No valid image provided.", None, None, None, None, None, None, None, None)
561
 
562
+ # Validate and preprocess image
563
  image = image.convert("RGB")
564
  img_array = np.array(image)
565
  if img_array.ndim != 3 or img_array.shape[2] != 3:
 
568
  if image.size[0] < 224 or image.size[1] < 224:
569
  return ("Error: Image resolution too low. Minimum 224x224 required.", None, None, None, None, None, None, None, None)
570
 
571
+ # Preprocess image using ViTImageProcessor
572
  inputs = image_processor(images=image, return_tensors="pt")
573
  pixel_values = inputs["pixel_values"].squeeze(0).numpy()
574
  pixel_values = ((pixel_values - pixel_values.min()) * 255 / (pixel_values.max() - pixel_values.min())).astype(np.uint8)
 
578
  image_base64 = base64.b64encode(buffered.getvalue()).decode("utf-8")
579
  payload = {"image": image_base64}
580
 
581
+ # Debug: Log image shapes and payload
582
  logger.info(f"Original Image shape: {img_array.shape}")
583
  logger.info(f"Preprocessed Pixel Values shape: {pixel_values.shape}")
584
 
585
+ # Integrate with Hugging Face and save to Salesforce
586
  updated_id = integrate_hf_with_salesforce(inspection_id, image_base64, tree_type, location, date, region, officer, field_team)
587
  if not updated_id:
588
  return ("Error: Failed to integrate with Hugging Face and save to Salesforce. Check logs for details.", None, None, None, None, None, None, None, None)
589
 
590
+ # Fetch the saved record to get prediction data
591
  if is_online and SALESFORCE_ACCESS_TOKEN:
592
  headers = {"Authorization": f"Bearer {SALESFORCE_ACCESS_TOKEN}", "Content-Type": "application/json"}
593
  get_response = requests.get(f"{SALESFORCE_API_URL}TreeInspection__c/{updated_id}", headers=headers, timeout=10)
 
598
  treatment = record.get("TreatmentPlan__c", "")
599
  fertilizer = record.get("Fertilizer__c", "")
600
  else:
601
+ logger.error(f"Failed to retrieve Salesforce record: {get_response.text}, Status: {get_response.status_code}")
602
  predicted_label = "Unknown"
603
  confidence_score = 0.0
604
  treatment = ""
 
609
  treatment = ""
610
  fertilizer = ""
611
 
612
+ # Convert image to JPEG for Base64 output
613
  buffered_output = BytesIO()
614
  image.save(buffered_output, format="JPEG")
615
  output_base64 = base64.b64encode(buffered_output.getvalue()).decode("utf-8")
616
 
617
+ # Format metadata
618
  metadata_summary = (
619
  f"Tree Type: {tree_type or 'Not provided'}\n"
620
  f"Location: {decrypt_sensitive_data(location_enc) or 'Not provided'}\n"
 
624
  f"Field Team: {field_team or 'Not provided'}"
625
  )
626
 
627
+ # Alert Generation
628
  alert_status = "No alert triggered."
629
  logger.info(f"Checking alert: predicted_label={predicted_label}, confidence_score={confidence_score}")
630
  if predicted_label in critical_diseases or confidence_score < 0.6:
 
645
  prediction_logs.append(case_details)
646
  logger.info(f"Added to prediction_logs: {case_details}")
647
 
648
+ # Ensure prediction_logs has at least one entry for testing
649
  if not prediction_logs:
650
  prediction_logs.append({
651
  "case_id": str(uuid.uuid4()),
 
661
  })
662
  logger.info("Added default entry to prediction_logs")
663
 
664
+ # Save prediction log locally
665
  try:
666
  with open("prediction_logs.json", "w") as f:
667
  json.dump(prediction_logs, f, indent=2)
 
669
  except Exception as e:
670
  logger.error(f"Error saving prediction log: {str(e)}")
671
 
672
+ # Check response time
673
  response_time = time.time() - start_time
674
  if response_time > 5:
675
  logger.warning(f"Response time {response_time:.2f} seconds exceeds 5-second requirement.")
 
693
  if not prediction_logs:
694
  logger.info("No data available for reporting.")
695
  return "Tree Health Report:\n\nNo data available for reporting."
696
+
697
  regions = {}
698
  diseases = {}
699
  confidences = []
700
  fertilizers = {}
701
  critical_alerts = 0
702
+
703
  for log in prediction_logs:
704
  region = log["region"]
705
  disease = log["disease"]
706
  confidence = log["confidence"]
707
  fertilizer = disease_recommendations.get(disease, {}).get("fertilizer", "Unknown")
708
+
709
  regions[region] = regions.get(region, 0) + 1
710
  diseases[disease] = diseases.get(disease, 0) + 1
711
  confidences.append(confidence)
712
  fertilizers[fertilizer] = fertilizers.get(fertilizer, 0) + 1
713
  if disease in critical_diseases or confidence < 0.6:
714
  critical_alerts += 1
715
+
716
  report = "Tree Health Report:\n\n"
717
  report += "1. Tree Inspection Volume by Region (Last 30 Days):\n"
718
  for region, count in regions.items():
719
  report += f" - {region}: {count} inspections\n"
720
+
721
  report += "\n2. Most Frequent Diseases (Last 90 Days):\n"
722
  for disease, count in sorted(diseases.items(), key=lambda x: x[1], reverse=True)[:5]:
723
  report += f" - {disease}: {count} cases\n"
724
+
725
  report += "\n3. Confidence Distribution (Last 30 Days):\n"
726
  confidence_bins = {"0.5-0.7": 0, "0.7-0.9": 0, "0.9-1.0": 0}
727
  for c in confidences:
 
733
  confidence_bins["0.9-1.0"] += 1
734
  for bin_range, count in confidence_bins.items():
735
  report += f" - {bin_range}: {count} cases\n"
736
+
737
  report += "\n4. Fertilizer Usage (Last 60 Days):\n"
738
  for fertilizer, count in sorted(fertilizers.items(), key=lambda x: x[1], reverse=True):
739
  report += f" - {fertilizer}: {count} recommendations\n"
740
+
741
  report += f"\n5. Critical Disease Alerts: {critical_alerts} alerts\n"
742
  logger.info(f"Generated report with {len(regions)} regions, {len(diseases)} diseases")
743
  return report
 
746
  if not prediction_logs:
747
  logger.info("No data available for dashboard.")
748
  return "Tree Health Intelligence Dashboard:\n\nNo data available for dashboard."
749
+
750
  filtered_logs = [log for log in prediction_logs if
751
  (not region_filter or log["region"] == region_filter) and
752
  (not officer_filter or log["officer"] == decrypt_sensitive_data(officer_filter)) and
753
  (not field_team_filter or log["field_team"] == field_team_filter)]
754
+
755
  if not filtered_logs:
756
  logger.info("No data matches the selected filters.")
757
  return "Tree Health Intelligence Dashboard:\n\nNo data matches the selected filters."
758
+
759
  dashboard = "Tree Health Intelligence Dashboard:\n\n"
760
  dashboard += f"Total Entries: {len(filtered_logs)}\n"
761
+
762
  regions = {}
763
  for log in filtered_logs:
764
  regions[log["region"]] = regions.get(log["region"], 0) + 1
765
  dashboard += "\nRegional Activity:\n"
766
  for region, count in regions.items():
767
  dashboard += f" - {region}: {count} inspections\n"
768
+
769
  diseases = {}
770
  for log in filtered_logs:
771
  diseases[log["disease"]] = diseases.get(log["disease"], 0) + 1
772
  dashboard += "\nDisease Patterns:\n"
773
  for disease, count in sorted(diseases.items(), key=lambda x: x[1], reverse=True):
774
  dashboard += f" - {disease}: {count} cases\n"
775
+
776
  confidence_bins = {"0.5-0.7": 0, "0.7-0.9": 0, "0.9-1.0": 0}
777
  for log in filtered_logs:
778
  c = log["confidence"]
 
785
  dashboard += "\nModel Confidence Spread:\n"
786
  for bin_range, count in confidence_bins.items():
787
  dashboard += f" - {bin_range}: {count} cases\n"
788
+
789
  fertilizers = {}
790
  for log in filtered_logs:
791
  fertilizer = disease_recommendations.get(log["disease"], {}).get("fertilizer", "Unknown")
 
793
  dashboard += "\nFertilizer Insights:\n"
794
  for fertilizer, count in sorted(fertilizers.items(), key=lambda x: x[1], reverse=True):
795
  dashboard += f" - {fertilizer}: {count} recommendations\n"
796
+
797
  critical_cases = [log for log in filtered_logs if log["disease"] in critical_diseases or log["confidence"] < 0.6]
798
  dashboard += f"\nCritical Disease Alerts: {len(critical_cases)} alerts\n"
799
  for log in critical_cases[:5]:
800
  dashboard += f" - {log['disease']} (Confidence: {log['confidence']:.2f}) at {log['timestamp']}\n"
801
+
802
  logger.info(f"Generated dashboard with {len(filtered_logs)} entries")
803
  return dashboard
804
 
805
def offline_queue_processor():
    """Background worker: replay queued work whenever connectivity returns.

    Loops forever (intended to run on a daemon thread): each pass checks
    network reachability plus the Salesforce and Hugging Face credentials,
    and when all are available syncs the offline files and drains the
    in-memory request queue, then sleeps for RETRY_INTERVAL seconds.
    """
    while True:
        ready = check_network_status() and SALESFORCE_ACCESS_TOKEN and HF_API_TOKEN
        if ready:
            sync_offline_logs()
            process_offline_queue()
        time.sleep(RETRY_INTERVAL)
812
 
813
# Authenticate with Salesforce at import time. Failure is non-fatal:
# records are queued to local JSON-lines files and synced later.
if not authenticate_salesforce():
    logger.warning("Salesforce authentication failed. Running in offline mode.")

# Launch the background sync worker as a daemon thread so it never
# blocks interpreter shutdown.
threading.Thread(target=offline_queue_processor, daemon=True).start()
819
 
820
  iface = gr.Interface(
821
  fn=predict_tree_disease,