LovnishVerma committed on
Commit
55e7233
·
verified ·
1 Parent(s): de19a51

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +36 -6
app.py CHANGED
@@ -5,6 +5,8 @@ import plotly.graph_objects as go
5
  import numpy as np
6
  import requests
7
  import time
 
 
8
  from datetime import datetime, timedelta
9
 
10
  # 1. PAGE CONFIGURATION
@@ -62,16 +64,32 @@ st.markdown("""
62
  </style>
63
  """, unsafe_allow_html=True)
64
 
65
- # 3. DYNAMIC GEOCODING ENGINE
66
  @st.cache_data(show_spinner=False)
67
  def fetch_coordinates_batch(unique_locations):
68
  """
69
  Fetches coordinates from OpenStreetMap Nominatim API.
 
70
  unique_locations: List of tuples (District, State)
71
  Returns: Dictionary {(District, State): (lat, lon)}
72
  """
73
- # 1. Pre-filled Cache (For speed & redundancy)
74
- coords_map = {
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
75
  ('Gautam Buddha Nagar', 'Uttar Pradesh'): (28.39, 77.65),
76
  ('West Jaintia Hills', 'Meghalaya'): (25.55, 92.38),
77
  ('West Khasi Hills', 'Meghalaya'): (25.56, 91.29),
@@ -91,18 +109,21 @@ def fetch_coordinates_batch(unique_locations):
91
  ('Delhi', 'Delhi'): (28.7041, 77.1025),
92
  ('Shimla', 'Himachal Pradesh'): (31.1048, 77.1734)
93
  }
 
 
94
 
95
- # 2. Identify missing locations
96
  missing_locs = [loc for loc in unique_locations if loc not in coords_map]
97
 
98
  if not missing_locs:
99
  return coords_map
100
 
101
- # 3. Dynamic Fetching for missing
102
  progress_text = "📡 Connecting to Satellite Geocoding API..."
103
  my_bar = st.progress(0, text=progress_text)
104
 
105
  headers = {'User-Agent': 'StarkDashboard/1.0 (Government Research Project)'}
 
106
 
107
  for i, (district, state) in enumerate(missing_locs):
108
  try:
@@ -119,6 +140,7 @@ def fetch_coordinates_batch(unique_locations):
119
  if response.status_code == 200 and response.json():
120
  data = response.json()[0]
121
  coords_map[(district, state)] = (float(data['lat']), float(data['lon']))
 
122
  else:
123
  # Fallback if API fails: Keep existing State Centers logic inside main loop later
124
  pass
@@ -130,6 +152,14 @@ def fetch_coordinates_batch(unique_locations):
130
  continue
131
 
132
  my_bar.empty()
 
 
 
 
 
 
 
 
133
  return coords_map
134
 
135
  # 4. MAIN DATA LOADER
@@ -150,7 +180,7 @@ def load_data():
150
  # Get Unique Locations
151
  unique_locs = list(df[['district', 'state']].drop_duplicates().itertuples(index=False, name=None))
152
 
153
- # Fetch Coordinates (Cached)
154
  coords_db = fetch_coordinates_batch(unique_locs)
155
 
156
  # Fallback Centers (State Capitals)
 
5
  import numpy as np
6
  import requests
7
  import time
8
+ import json
9
+ import os
10
  from datetime import datetime, timedelta
11
 
12
  # 1. PAGE CONFIGURATION
 
64
  </style>
65
  """, unsafe_allow_html=True)
66
 
67
+ # 3. DYNAMIC GEOCODING ENGINE WITH PERSISTENT JSON
68
  @st.cache_data(show_spinner=False)
69
  def fetch_coordinates_batch(unique_locations):
70
  """
71
  Fetches coordinates from OpenStreetMap Nominatim API.
72
+ Uses 'district_coords.json' for persistent storage.
73
  unique_locations: List of tuples (District, State)
74
  Returns: Dictionary {(District, State): (lat, lon)}
75
  """
76
+ json_file = 'district_coords.json'
77
+ coords_map = {}
78
+
79
+ # 1. Load from JSON if exists
80
+ if os.path.exists(json_file):
81
+ try:
82
+ with open(json_file, 'r') as f:
83
+ # Convert string keys "District|State" back to tuple
84
+ loaded_data = json.load(f)
85
+ for k, v in loaded_data.items():
86
+ dist, state = k.split("|")
87
+ coords_map[(dist, state)] = tuple(v)
88
+ except json.JSONDecodeError:
89
+ pass # File corrupted, start fresh
90
+
91
+ # 2. Add Hardcoded Pre-fills (High Priority Redundancy)
92
+ hardcoded_map = {
93
  ('Gautam Buddha Nagar', 'Uttar Pradesh'): (28.39, 77.65),
94
  ('West Jaintia Hills', 'Meghalaya'): (25.55, 92.38),
95
  ('West Khasi Hills', 'Meghalaya'): (25.56, 91.29),
 
109
  ('Delhi', 'Delhi'): (28.7041, 77.1025),
110
  ('Shimla', 'Himachal Pradesh'): (31.1048, 77.1734)
111
  }
112
+ # Update cache with hardcoded values (overrides JSON if conflict, usually better accuracy)
113
+ coords_map.update(hardcoded_map)
114
 
115
+ # 3. Identify missing locations
116
  missing_locs = [loc for loc in unique_locations if loc not in coords_map]
117
 
118
  if not missing_locs:
119
  return coords_map
120
 
121
+ # 4. Dynamic Fetching for missing
122
  progress_text = "📡 Connecting to Satellite Geocoding API..."
123
  my_bar = st.progress(0, text=progress_text)
124
 
125
  headers = {'User-Agent': 'StarkDashboard/1.0 (Government Research Project)'}
126
+ updated = False
127
 
128
  for i, (district, state) in enumerate(missing_locs):
129
  try:
 
140
  if response.status_code == 200 and response.json():
141
  data = response.json()[0]
142
  coords_map[(district, state)] = (float(data['lat']), float(data['lon']))
143
+ updated = True
144
  else:
145
  # Fallback if API fails: Keep existing State Centers logic inside main loop later
146
  pass
 
152
  continue
153
 
154
  my_bar.empty()
155
+
156
+ # 5. Save back to JSON if new data fetched
157
+ if updated:
158
+ # Convert keys to string "District|State" for JSON compatibility
159
+ save_data = {f"{k[0]}|{k[1]}": v for k, v in coords_map.items()}
160
+ with open(json_file, 'w') as f:
161
+ json.dump(save_data, f)
162
+
163
  return coords_map
164
 
165
  # 4. MAIN DATA LOADER
 
180
  # Get Unique Locations
181
  unique_locs = list(df[['district', 'state']].drop_duplicates().itertuples(index=False, name=None))
182
 
183
+ # Fetch Coordinates (Cached + Persistent JSON)
184
  coords_db = fetch_coordinates_batch(unique_locs)
185
 
186
  # Fallback Centers (State Capitals)