shreyankisiri committed on
Commit
9bcc127
Β·
verified Β·
1 Parent(s): 98b6c58

Upload 7 files

Browse files
src/utils/__pycache__/llms.cpython-313.pyc ADDED
Binary file (319 Bytes). View file
 
src/utils/__pycache__/llms.cpython-39.pyc ADDED
Binary file (287 Bytes). View file
 
src/utils/llms.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
from langchain_google_genai import ChatGoogleGenerativeAI
from dotenv import load_dotenv

# Pull environment variables (e.g. GOOGLE_API_KEY) into the process before
# the client below is constructed.
load_dotenv()

# Shared Gemini chat-model instance used across the application.
llm = ChatGoogleGenerativeAI(model='gemini-2.0-flash')
src/utils/nodes.py ADDED
@@ -0,0 +1,776 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import requests
2
+ import os
3
+ import gzip
4
+ import numpy as np
5
+ from PIL import Image
6
+ import struct
7
+ from pathlib import Path
8
+ import rasterio
9
+ from rasterio.transform import from_origin
10
+ from geopy.geocoders import Nominatim
11
+ from state import State
12
+
13
+ geolocator = Nominatim(user_agent="lulc-retriever")
14
+
15
def get_bbox(place):
    """Return a (west, south, east, north) bounding box around *place*.

    Geocodes the place name with Nominatim and pads the resulting point by
    0.1 degrees (~10 km) on every side.

    Raises:
        ValueError: if the place name cannot be geocoded.
    """
    location = geolocator.geocode(place)
    if location is None:
        raise ValueError(f"Could not geocode location: {place}")

    lat, lon = location.latitude, location.longitude
    pad = 0.1  # degrees (~10km)
    return (lon - pad, lat - pad, lon + pad, lat + pad)
24
+
25
def download_srtm_hgt(lat, lon, output_dir="dem_tiles"):
    """Download and decompress one SRTM elevation tile (.hgt).

    Args:
        lat: integer latitude of the tile's south-west corner.
        lon: integer longitude of the tile's south-west corner.
        output_dir: directory where the decompressed .hgt file is cached.

    Returns:
        Local path to the .hgt file, or None if the download failed.
    """
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # Tile names encode the SW corner, e.g. N12E077.hgt / S01W045.hgt.
    lat_str = f"N{lat:02d}" if lat >= 0 else f"S{abs(lat):02d}"
    lon_str = f"E{lon:03d}" if lon >= 0 else f"W{abs(lon):03d}"
    tile_name = f"{lat_str}{lon_str}.hgt"

    url = f"https://s3.amazonaws.com/elevation-tiles-prod/skadi/{lat_str}/{tile_name}.gz"
    output_path = os.path.join(output_dir, tile_name)

    # Already cached from a previous run.
    if os.path.exists(output_path):
        return output_path

    gz_path = output_path + ".gz"
    try:
        print(f"Downloading {tile_name}...")
        # Fix: a timeout so a hung connection cannot stall the pipeline forever.
        response = requests.get(url, stream=True, timeout=60)
        response.raise_for_status()

        with open(gz_path, 'wb') as f:
            for chunk in response.iter_content(chunk_size=8192):
                f.write(chunk)

        with gzip.open(gz_path, 'rb') as f_in:
            with open(output_path, 'wb') as f_out:
                f_out.write(f_in.read())

        print(f"βœ… Downloaded: {tile_name}")
        return output_path

    except Exception as e:
        print(f"❌ Failed to download {tile_name}: {e}")
        return None
    finally:
        # Fix: never leave a stale .gz behind, even when the download or
        # decompression failed part-way through.
        if os.path.exists(gz_path):
            os.remove(gz_path)
62
+
63
def read_hgt_file(hgt_file):
    """Read an SRTM .hgt tile into memory.

    Args:
        hgt_file: path to a tile named like ``N12E077.hgt`` (the name encodes
            the SW corner of the 1x1 degree tile).

    Returns:
        Tuple of (elevation_array, geotransform, size) where geotransform is
        ``[top_left_x, px_w, 0, top_left_y, 0, -px_h]``.
    """
    # Tile edge length in samples, inferred from the file size
    # (two bytes per sample, square grid).
    file_size = os.path.getsize(hgt_file)
    if file_size == 1201 * 1201 * 2:
        size = 1201
    elif file_size == 3601 * 3601 * 2:
        size = 3601
    else:
        size = int(np.sqrt(file_size // 2))
        print(f"Auto-detected size: {size}x{size}")

    # Decode the SW-corner coordinates from the filename.
    basename = os.path.basename(hgt_file)
    lat_token, lon_token = basename[:3], basename[3:7]
    lat = int(lat_token[1:]) * (1 if lat_token.startswith('N') else -1)
    lon = int(lon_token[1:]) * (1 if lon_token.startswith('E') else -1)

    # Samples are big-endian signed 16-bit integers, row-major from the NW corner.
    raw = Path(hgt_file).read_bytes()
    elevation_data = np.frombuffer(raw, dtype='>i2').reshape(size, size)

    # Rows/columns share their edge samples with neighbouring tiles,
    # hence (size - 1) intervals per degree.
    pixel_size = 1.0 / (size - 1)

    # GDAL-style geotransform anchored at the tile's NW corner (lat + 1).
    geotransform = [lon, pixel_size, 0, lat + 1, 0, -pixel_size]
    return elevation_data, geotransform, size
115
+
116
def clip_elevation_data(elevation_data, geotransform, size, bbox):
    """Clip a full elevation tile to a geographic bounding box.

    Args:
        elevation_data: 2-D array for the whole tile.
        geotransform: ``[origin_x, px_w, 0, origin_y, 0, px_h]`` (px_h < 0).
        size: tile edge length in pixels (used for clamping).
        bbox: (west, south, east, north) in degrees.

    Returns:
        (clipped_array, new_geotransform) for the clipped window.
    """
    west, south, east, north = bbox
    origin_x, px_w, _, origin_y, _, px_h = geotransform  # px_h is negative

    def to_col(x):
        return int((x - origin_x) / px_w)

    def to_row(y):
        return int((origin_y - y) / abs(px_h))

    def clamp(v):
        return max(0, min(v, size - 1))

    # Geographic corners -> clamped pixel window (inclusive bounds).
    col_min, col_max = clamp(to_col(west)), clamp(to_col(east))
    row_min, row_max = clamp(to_row(north)), clamp(to_row(south))

    clipped = elevation_data[row_min:row_max + 1, col_min:col_max + 1]

    # Re-anchor the geotransform at the window's top-left pixel.
    new_geotransform = [
        origin_x + col_min * px_w,
        px_w,
        0,
        origin_y + row_min * px_h,
        0,
        px_h,
    ]
    return clipped, new_geotransform
153
+
154
def save_as_geotiff_basic(elevation_data, geotransform, output_file):
    """Write elevation data as a 16-bit TIFF plus a sidecar metadata file.

    PIL's 16-bit mode is unsigned, so negative elevations are shifted up by
    a recorded offset that a reader can subtract back out using the sidecar.

    Returns:
        (tiff_path, metadata_path)
    """
    min_val = np.min(elevation_data)
    offset = abs(min_val) if min_val < 0 else 0
    adjusted = elevation_data + offset if offset else elevation_data
    adjusted = adjusted.astype(np.uint16)

    Image.fromarray(adjusted, mode='I;16').save(output_file)

    # Georeferencing cannot be stored by PIL, so record it next to the image.
    metadata_file = output_file.replace('.tif', '_metadata.txt')
    with open(metadata_file, 'w') as f:
        f.write(f"GeoTransform: {geotransform}\n")
        f.write(f"Offset: {offset}\n")
        f.write(f"Original min value: {min_val}\n")
        f.write(f"Size: {adjusted.shape}\n")

    return output_file, metadata_file
183
+
184
def get_dem_elevation_tif(state: State) -> State:
    """
    Download DEM data and save as TIF format in a subdirectory `dem_files`

    Args:
        state: State object containing bbox, place_name, and working_directory

    Returns:
        Updated State object with DEM file info
    """
    try:
        state.status = "downloading_dem"

        # Validate required fields
        if not state.bbox:
            state.error_log.append("Bounding box is required for DEM download")
            state.status = "error"
            return state

        if not state.place_name:
            state.error_log.append("Place name is required for DEM download")
            state.status = "error"
            return state

        # Create working & sub-directories
        working_dir = Path(state.working_directory)
        dem_tiles_dir = working_dir / "dem_tiles"  # raw downloaded .hgt tiles
        dem_files_dir = working_dir / "dem_files"  # final clipped GeoTIFF output
        working_dir.mkdir(parents=True, exist_ok=True)
        dem_tiles_dir.mkdir(parents=True, exist_ok=True)
        dem_files_dir.mkdir(parents=True, exist_ok=True)

        state.parameters["dem_directory"] = str(dem_files_dir.resolve())

        west, south, east, north = state.bbox
        # Sanitize the place name so it is safe to embed in a filename.
        place_safe = state.place_name.replace(" ", "_").replace(",", "").replace(".", "")
        output_file = dem_files_dir / f"{place_safe}_dem.tif"

        print(f"πŸš€ Starting DEM download for {state.place_name}...")
        print(f"πŸ“ Bounding box: {state.bbox}")
        print(f"πŸ“ Output directory: {dem_files_dir}")

        # SRTM tiles are 1x1 degree, named by their integer SW corner:
        # iterate every integer lat/lon the bbox touches.
        lat_range = range(int(south), int(north) + 1)
        lon_range = range(int(west), int(east) + 1)

        all_elevation_data = []
        all_geotransforms = []
        downloaded_tiles = []

        for lat in lat_range:
            for lon in lon_range:
                hgt_file = download_srtm_hgt(lat, lon, str(dem_tiles_dir))
                if hgt_file:
                    try:
                        elevation_data, geotransform, size = read_hgt_file(hgt_file)
                        clipped_data, clipped_geotransform = clip_elevation_data(
                            elevation_data, geotransform, size, state.bbox
                        )
                        all_elevation_data.append(clipped_data)
                        all_geotransforms.append(clipped_geotransform)
                        downloaded_tiles.append(os.path.basename(hgt_file))
                        print(f"βœ… Processed {os.path.basename(hgt_file)}: {clipped_data.shape}")
                    except Exception as e:
                        # One bad tile should not abort the whole download;
                        # record the failure and keep going.
                        err = f"Error processing {hgt_file}: {e}"
                        state.error_log.append(err)
                        print(f"❌ {err}")

        if not all_elevation_data:
            state.error_log.append("No elevation data processed successfully")
            state.status = "error"
            return state

        print(f"\nπŸ”„ Processing {len(all_elevation_data)} elevation tiles...")

        if len(all_elevation_data) > 1:
            print("⚠️ Multiple tiles detected. Using first tile only (mosaicking not implemented).")

        # NOTE: only the first tile is kept, so bboxes that span a tile
        # border are silently truncated to that tile's clipped window.
        final_data = all_elevation_data[0]
        final_geotransform = all_geotransforms[0]

        tif_file, metadata_file = save_as_geotiff_basic(
            final_data, final_geotransform, str(output_file)
        )

        # Summary statistics recorded alongside the output file entry.
        min_elev = float(np.min(final_data))
        max_elev = float(np.max(final_data))
        mean_elev = float(np.mean(final_data))
        shape = final_data.shape

        state.output_files.append({
            "type": "dem",
            "format": "geotiff",
            "file_path": str(tif_file),
            "metadata_file": str(metadata_file),
            "min_elevation": min_elev,
            "max_elevation": max_elev,
            "mean_elevation": mean_elev,
            "data_shape": shape,
            "downloaded_tiles": downloaded_tiles,
            "bbox": state.bbox,
            "geotransform": final_geotransform
        })
        state.status = "dem_downloaded"

        print(f"\n🎯 Success! DEM saved to: {tif_file}")
        print(f"πŸ“Š Elevation stats: Min={min_elev}, Max={max_elev}, Mean={mean_elev:.1f} m")
        print(f"πŸ“ Data size: {shape}")
        return state

    except Exception as e:
        # Catch-all so this workflow node always returns a State rather than raising.
        state.error_log.append(f"Unhandled error during DEM download: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
298
+
299
+
300
def update_dem(filepath, state):
    """Stamp a CRS and geotransform onto a DEM written without georeferencing.

    The basic TIFF writer (PIL) stores no spatial metadata; this rewrites the
    file in place with WGS84 and a transform derived from the state's bbox.

    Args:
        filepath: path of the GeoTIFF to fix up (overwritten in place).
        state: State whose bbox is (west, south, east, north) in degrees.
    """
    crs = "EPSG:4326"  # WGS84 Latitude/Longitude
    # from_origin expects the UPPER-left corner (west, north). The original
    # code passed bbox[1] (south), which flipped the georeferencing vertically.
    transform = from_origin(
        state.bbox[0],  # west (upper-left X)
        state.bbox[3],  # north (upper-left Y)
        0.0008333,  # pixel width (approx 30m resolution)
        0.0008333   # pixel height (approx 30m resolution)
    )

    # Read everything first so the source handle is closed before we
    # overwrite the very same path.
    with rasterio.open(filepath) as src:
        profile = src.profile
        data = src.read(1)

    profile.update({
        'crs': crs,
        'transform': transform
    })

    with rasterio.open(filepath, 'w', **profile) as dst:
        dst.write(data, 1)
325
+
326
+
327
+ import os
328
+ import requests
329
+ import gzip
330
+ import shutil
331
+ from datetime import datetime, timedelta
332
+ from tqdm import tqdm
333
+
334
def download_chirps_tif(date: datetime, out_dir="chirps_tifs"):
    """Download one daily CHIRPS v2.0 global rainfall GeoTIFF.

    Args:
        date: the day to fetch.
        out_dir: local cache directory for the .tif files.

    Returns:
        Local path to the .tif, or None when the server has no file for the day.
    """
    y, m, d = date.strftime("%Y"), date.strftime("%m"), date.strftime("%d")
    filename = f"chirps-v2.0.{y}.{m}.{d}.tif"
    # Fix: the URL must end with the per-day gzip archive name; the previous
    # string had the filename segment missing/corrupted.
    url = f"https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/tifs/p25/{y}/{filename}.gz"

    gz_path = os.path.join(out_dir, filename + ".gz")
    tif_path = os.path.join(out_dir, filename)

    if os.path.exists(tif_path):
        print(f"βœ… Already downloaded: {tif_path}")
        return tif_path

    os.makedirs(out_dir, exist_ok=True)
    # Timeout so a hung connection cannot stall the batch loop.
    r = requests.get(url, stream=True, timeout=60)
    if r.status_code != 200:
        print(f"❌ Failed: {url}")
        return None

    with open(gz_path, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)

    with gzip.open(gz_path, "rb") as f_in, open(tif_path, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)

    os.remove(gz_path)
    print(f"βœ… Downloaded and extracted: {tif_path}")
    return tif_path
363
+
364
def batch_download_chirps(start_date: str, end_date: str, out_dir="chirps_tifs"):
    """Download daily CHIRPS rasters for every date in [start_date, end_date].

    Dates are 'YYYY-MM-DD' strings. Dates newer than (today - 3 days, UTC)
    are skipped because CHIRPS publishes with a short lag.
    """
    start = datetime.strptime(start_date, "%Y-%m-%d")
    end = datetime.strptime(end_date, "%Y-%m-%d")
    max_available = datetime.utcnow().date() - timedelta(days=3)

    span_days = (end - start).days
    for offset in range(span_days + 1):
        day = start + timedelta(days=offset)
        if day.date() > max_available:
            print(f"⚠️ Skipping future/unavailable date: {day.strftime('%Y-%m-%d')}")
        else:
            download_chirps_tif(day, out_dir)
377
+
378
+ from datetime import datetime, timedelta
379
+ from dateutil.relativedelta import relativedelta
380
+
381
def get_rainfall_data(state: State):
    """Node: pull one week of CHIRPS rainfall ending exactly one year ago.

    Downloads the daily rasters into <working_directory>/rainfall_data and
    returns the (unmodified) state.
    """
    print("Fetching rainfall data from same timeframe last year...")

    # Anchor the window at (today - 1 year); start one week earlier.
    anchor = datetime.today() - relativedelta(years=1)
    start_date = (anchor - timedelta(days=7)).strftime('%Y-%m-%d')
    end_date = anchor.strftime('%Y-%m-%d')

    print("Start Date:", start_date)
    print("End Date:", end_date)

    batch_download_chirps(start_date, end_date, state.working_directory + "/rainfall_data")
    return state
401
+
402
+ from whitebox import WhiteboxTools
403
+ from pathlib import Path
404
+ from dotenv import load_dotenv
405
+ import os
406
+
407
+ load_dotenv()
408
+
409
+ wbt = WhiteboxTools()
410
+ wbt.set_verbose_mode(True)
411
+ wbt.set_compress_rasters(False)
412
def run_hydrology_generator(dem_path, output_dir=None):
    """Run the WhiteboxTools hydrology chain on a DEM.

    Produces slope, aspect, a depression-filled DEM, D8 flow direction,
    flow accumulation, and an extracted stream raster.

    Args:
        dem_path: path to the input DEM GeoTIFF.
        output_dir: destination folder; defaults to 'output' when empty/None.

    Returns:
        Dict mapping product name -> output file path (as str).

    Raises:
        FileNotFoundError: if the DEM does not exist.
    """
    # Default to a folder named 'output' if none provided.
    if not output_dir or output_dir.strip() == "":
        output_dir = "output"

    output_dir = Path(output_dir).resolve()  # WhiteboxTools wants absolute paths
    output_dir.mkdir(exist_ok=True, parents=True)

    dem_path = Path(dem_path)
    if not dem_path.exists():
        # raise, not assert: asserts are stripped under `python -O`.
        raise FileNotFoundError(f"❌ DEM not found at {dem_path}")

    # Fix: the original duplicated the DEM check, the filled_dem assignment
    # and the progress prints; each step now appears exactly once.
    filled_dem = output_dir / "dem_filled.tif"
    flow_pointer = output_dir / "flow_dir.tif"
    flow_accum = output_dir / "flow_acc.tif"
    stream_raster = output_dir / "streams.tif"
    slope_path = output_dir / "slope.tif"
    aspect_path = output_dir / "aspect.tif"

    print(f"πŸ“ Output directory: {output_dir}")
    print(f"πŸ“ Output file will be: {filled_dem}")

    print("πŸ“ Generating Slope...")
    wbt.slope(dem=str(dem_path), output=str(slope_path), zfactor=1.0)
    assert slope_path.exists(), "❌ Slope file not generated"

    print("🧭 Generating Aspect...")
    wbt.aspect(dem=str(dem_path), output=str(aspect_path))
    assert aspect_path.exists(), "❌ Aspect file not generated"

    print("πŸ“₯ Running Fill Depressions...")
    wbt.fill_depressions(dem=str(dem_path), output=str(filled_dem))
    assert filled_dem.exists(), "❌ Filled DEM not generated."

    print("πŸ“ˆ Calculating Flow Direction...")
    wbt.d8_pointer(dem=str(filled_dem), output=str(flow_pointer))
    assert flow_pointer.exists(), "❌ Flow direction file not generated."

    print("🌊 Flow Accumulation...")
    wbt.d8_flow_accumulation(i=str(filled_dem), output=str(flow_accum), out_type="cells")
    assert flow_accum.exists(), "❌ Flow accumulation file not generated."

    print("🧡 Extracting Streams...")
    wbt.extract_streams(flow_accum=str(flow_accum), output=str(stream_raster), threshold=100)
    assert stream_raster.exists(), "❌ Stream raster not generated."

    print("βœ… All hydrological outputs generated successfully.")
    return {
        "filled_dem": str(filled_dem),
        "flow_dir": str(flow_pointer),
        "flow_acc": str(flow_accum),
        "streams": str(stream_raster),
        "slope": str(slope_path),
        "aspect": str(aspect_path)
    }
477
+
478
+ import os
479
+ import osmnx as ox
480
+ import geopandas as gpd
481
+
482
+ import os
483
+ import osmnx as ox
484
+ import geopandas as gpd
485
+ import pandas as pd
486
+ from datetime import datetime
487
+
488
def fetch_osm_infrastructure(place: str, save_path: str):
    """
    Fetch roads, buildings, schools, hospitals from OSM and save as one GeoJSON.

    Parameters:
    - place: str β€” e.g., "Bangalore, India"
    - save_path: str β€” Output GeoJSON path

    Returns: Combined GeoDataFrame

    Raises:
        RuntimeError: if none of the feature groups could be fetched.
    """
    start = datetime.now()
    print(f"πŸ” Fetching combined OSM infrastructure for: {place}")
    parent = os.path.dirname(save_path)
    if parent:  # fix: dirname is "" for a bare filename and makedirs("") raises
        os.makedirs(parent, exist_ok=True)

    all_gdfs = []

    feature_tags = {
        "roads": {"highway": True},
        "buildings": {"building": True},
        "schools": {"amenity": "school"},
        "hospitals": {"amenity": "hospital"}
    }

    for name, tags in feature_tags.items():
        print(f"➑️ Fetching {name}")
        try:
            gdf = ox.features_from_place(place, tags=tags)
            gdf["feature_type"] = name  # add a column to indicate the type
            all_gdfs.append(gdf)
        except Exception as e:
            # Best-effort: a missing layer should not sink the other layers.
            print(f"⚠️ Failed to fetch {name}: {e}")

    if not all_gdfs:
        raise RuntimeError("No OSM data was fetched.")

    combined_gdf = pd.concat(all_gdfs, ignore_index=True)
    combined_gdf = gpd.GeoDataFrame(combined_gdf, geometry="geometry", crs="EPSG:4326")

    combined_gdf.to_file(save_path, driver="GeoJSON")
    print(f"βœ… Combined GeoJSON saved to: {save_path}")
    end = datetime.now()
    # Fix: (end - start) * 1000 prints a scaled timedelta, not milliseconds;
    # convert via total_seconds() first.
    print(f"Elapsed: {(end - start).total_seconds() * 1000:.0f} ms")
    return combined_gdf
531
+
532
def get_osm_infrastructure(state):
    """Node: fetch combined OSM infrastructure for the state's place.

    Writes <working_directory>/OSM_infrastructure/OSM.geojson.

    Returns:
        The state, so this node chains like the other workflow steps
        (the original returned None, unlike every sibling node).
    """
    base_dir = os.path.join(state.working_directory, "OSM_infrastructure")

    fetch_osm_infrastructure(
        state.place_name,
        os.path.join(base_dir, "OSM.geojson")
    )
    return state
539
+
540
+ import osmnx as ox
541
+ import geopandas as gpd
542
+ import pandas as pd
543
+
544
def tidal_risk_from_osm(place, buffer_dist=1000, output_geojson="tidal_risk_osm.geojson"):
    """Build a coastal/tidal risk zone by buffering OSM coastline and water.

    Args:
        place: place name understood by OSM/Nominatim.
        buffer_dist: buffer radius in metres (applied in EPSG:3857).
        output_geojson: destination GeoJSON path.

    Returns:
        The path the dissolved risk-zone GeoJSON was written to (EPSG:4326).
    """
    print(f"🌍 Fetching OSM water + coastline for {place}")

    # Fetch both layers and reproject to a metric CRS so the buffer
    # distance is in metres.
    coast = ox.features_from_place(place, tags={"natural": "coastline"}).to_crs("EPSG:3857")
    water = ox.features_from_place(place, tags={"natural": "water"}).to_crs("EPSG:3857")
    combined = gpd.GeoDataFrame(pd.concat([coast, water], ignore_index=True), crs=coast.crs)

    print(f"🧱 Found {len(combined)} features. Buffering...")
    buffered = combined.buffer(buffer_dist)
    risk_gdf = gpd.GeoDataFrame(geometry=buffered, crs="EPSG:3857").dissolve()
    risk_gdf = risk_gdf.to_crs("EPSG:4326")

    risk_gdf.to_file(output_geojson, driver="GeoJSON")
    print(f"βœ… Saved Tidal Risk GeoJSON: {output_geojson}")
    return output_geojson
565
+
566
+
567
+
568
+ import os
569
+ import numpy as np
570
+ import rasterio
571
+ from rasterio.transform import from_bounds
572
+ from rasterio.crs import CRS
573
+ import osmnx as ox
574
+ import geopandas as gpd
575
+ from shapely.geometry import box
576
+ from scipy.ndimage import distance_transform_edt
577
+
578
def get_healthcare_data(bbox, tags):
    """Fetch healthcare POIs inside *bbox* from OSM, reduced to centroids.

    Args:
        bbox: (minx, miny, maxx, maxy) in EPSG:4326.
        tags: OSM tag filter, e.g. {"amenity": ["hospital", "clinic"]}.

    Returns:
        GeoDataFrame of point geometries in EPSG:4326.
    """
    # features_from_polygon replaces the removed geometries_from_polygon API.
    polygon = box(*bbox)
    gdf = ox.features_from_polygon(polygon, tags=tags).to_crs("EPSG:4326")
    # Collapse polygon footprints to single representative points.
    gdf["geometry"] = gdf.centroid
    return gdf
586
+
587
def rasterize_healthcare_points(bbox, points_gdf, pixel_size=0.0005):
    """Rasterize healthcare points over a bounding box.

    Returns:
        (raster, transform) where raster is uint8 with 1 at every cell
        containing at least one point.
    """
    minx, miny, maxx, maxy = bbox
    width = int((maxx - minx) / pixel_size)
    height = int((maxy - miny) / pixel_size)
    transform = from_bounds(minx, miny, maxx, maxy, width, height)

    grid = np.zeros((height, width), dtype=np.uint8)
    inverse = ~transform
    for pt in points_gdf.geometry:
        # Invert the affine to get fractional (col, row), then truncate to
        # the containing cell; points outside the grid are ignored.
        col_f, row_f = inverse * (pt.x, pt.y)
        col, row = int(col_f), int(row_f)
        if 0 <= row < height and 0 <= col < width:
            grid[row, col] = 1
    return grid, transform
601
+
602
def compute_distance_transform(binary_raster, pixel_size_deg):
    """Compute Euclidean distance in meters from healthcare locations.

    Cells marked 1 are facilities; every other cell receives the straight
    line distance (metres) to the nearest facility, converted from pixels
    using ~111 km per degree.
    """
    # EDT measures distance to the nearest zero, so invert: facilities -> 0.
    empty_cells = (binary_raster == 0).astype(np.uint8)
    dist_px = distance_transform_edt(empty_cells)
    return dist_px * (111000 * pixel_size_deg)
608
+
609
def save_distance_raster(distance_raster, transform, output_path, crs="EPSG:4326"):
    """Save distance raster to a single-band GeoTIFF."""
    profile = dict(
        driver="GTiff",
        height=distance_raster.shape[0],
        width=distance_raster.shape[1],
        count=1,
        dtype=distance_raster.dtype,
        crs=CRS.from_string(crs),
        transform=transform,
    )
    with rasterio.open(output_path, "w", **profile) as dst:
        dst.write(distance_raster, 1)
623
+
624
def generate_distance_to_healthcare(bbox, output_path="distance_to_healthcare.tif"):
    """
    Complete tool to generate distance raster to healthcare facilities.

    Parameters:
    - bbox: [minx, miny, maxx, maxy] for the area of interest
    - output_path: output GeoTIFF path
    """
    pixel_size = 0.0005  # degrees per cell (~55 m at the equator)

    print("πŸ” Fetching healthcare data from OpenStreetMap...")
    facility_tags = {"amenity": ["hospital", "clinic", "doctors", "pharmacy"]}
    facilities = get_healthcare_data(bbox, facility_tags)

    print(f"πŸ—Ί Rasterizing {len(facilities)} healthcare points...")
    binary_raster, transform = rasterize_healthcare_points(bbox, facilities, pixel_size)

    print("πŸ“ Computing distance transform...")
    distance_raster = compute_distance_transform(binary_raster, pixel_size)

    print(f"πŸ’Ύ Saving to {output_path}...")
    save_distance_raster(distance_raster, transform, output_path)

    print("βœ… Done! Distance raster generated.")
647
+
648
+
649
+ import os
650
+ import geopandas as gpd
651
+ import rasterio
652
+ import matplotlib.pyplot as plt
653
+ from rasterio.plot import show
654
+ from shapely.geometry import box
655
+ import contextily as ctx
656
+
657
def visualize_geospatial_file(file_path: str, output_path: str = "output_map.png"):
    """
    Visualizes raster or vector geospatial files and saves the output as an image.

    Args:
        file_path (str): Path to the GeoTIFF (.tif), GeoJSON, Shapefile, etc.
        output_path (str): Path to save the output image (.png)

    Returns:
        The output image path.

    Raises:
        ValueError: for unsupported file extensions.
    """
    ext = os.path.splitext(file_path)[1].lower()

    if ext in [".tif", ".tiff"]:
        with rasterio.open(file_path) as src:
            fig, ax = plt.subplots(figsize=(10, 10))
            show(src, ax=ax, title="Raster Preview")
            ax.set_axis_off()
            # Fix: plt.plot() was a no-op β€” the figure must actually be
            # written to output_path before it is returned.
            fig.savefig(output_path, bbox_inches="tight")
            plt.close(fig)
        return output_path

    elif ext in [".geojson", ".shp", ".gpkg"]:
        gdf = gpd.read_file(file_path)
        # Fix: reproject BEFORE plotting so the layer lines up with the Web
        # Mercator basemap (the original reprojected after plotting, so the
        # basemap never matched the axes).
        add_basemap = bool(gdf.crs and gdf.crs.to_epsg() == 4326)
        if add_basemap:
            gdf = gdf.to_crs(epsg=3857)

        fig, ax = plt.subplots(figsize=(10, 10))
        gdf.plot(ax=ax, edgecolor='black', linewidth=0.8, alpha=0.6, color='orange')
        if add_basemap:
            ctx.add_basemap(ax, source=ctx.providers.Stamen.TonerLite)

        ax.set_title("Vector Preview")
        ax.set_axis_off()
        fig.savefig(output_path, bbox_inches="tight")
        plt.close(fig)
        return output_path

    else:
        raise ValueError(f"Unsupported file type: {ext}")
693
+
694
+ import osmnx as ox
695
+ import geopandas as gpd
696
+ from shapely.geometry import box
697
+ import numpy as np
698
+ import rasterio
699
+ from rasterio.transform import from_bounds
700
+ from rasterio.crs import CRS
701
+ from scipy.ndimage import distance_transform_edt
702
+
703
+
704
def get_infrastructure_gdf(bbox, tags):
    """Fetch infrastructure data using OSM.

    Configures the Kumi Overpass mirror with a 60 s timeout, queries the
    bbox polygon, and reduces all geometries to centroids in EPSG:4326.
    """
    ox.settings.overpass_endpoint = "https://overpass.kumi.systems/api/interpreter"
    ox.settings.timeout = 60

    area = box(*bbox)
    gdf = ox.features_from_polygon(area, tags=tags).to_crs("EPSG:4326")
    gdf["geometry"] = gdf.centroid
    return gdf
714
+
715
+
716
def rasterize_points(gdf, bbox, pixel_size=0.0005):
    """Burn point geometries into a uint8 grid covering *bbox*.

    Returns:
        (raster, transform); cells that contain at least one point are 1.
    """
    minx, miny, maxx, maxy = bbox
    n_cols = int((maxx - minx) / pixel_size)
    n_rows = int((maxy - miny) / pixel_size)
    transform = from_bounds(minx, miny, maxx, maxy, n_cols, n_rows)

    grid = np.zeros((n_rows, n_cols), dtype=np.uint8)
    inverse = ~transform
    for geom in gdf.geometry:
        # Inverse affine: world (x, y) -> fractional (col, row).
        col_f, row_f = inverse * (geom.x, geom.y)
        row, col = int(row_f), int(col_f)
        if 0 <= row < n_rows and 0 <= col < n_cols:
            grid[row, col] = 1
    return grid, transform
729
+
730
+
731
def save_raster(raster, transform, output_path, crs="EPSG:4326"):
    """Write a single-band raster to a GeoTIFF at *output_path*."""
    meta = {
        "driver": "GTiff",
        "height": raster.shape[0],
        "width": raster.shape[1],
        "count": 1,
        "dtype": raster.dtype,
        "crs": CRS.from_string(crs),
        "transform": transform,
    }
    with rasterio.open(output_path, "w", **meta) as dst:
        dst.write(raster, 1)
744
+
745
+
746
def generate_infrastructure_tif(bbox, output_path="infrastructure.tif", pixel_size=0.0005, distance=False):
    """
    Generate a binary or distance-based infrastructure raster.

    Args:
        bbox: (minx, miny, maxx, maxy) in EPSG:4326.
        output_path: destination GeoTIFF.
        pixel_size: cell size in degrees.
        distance: when True, write metres-to-nearest-feature instead of 0/1.
    """
    # Feature classes that count as "infrastructure" for this raster.
    tags = {
        "highway": True,
        "building": True,
        "bridge": True,
        "railway": True
    }

    print("πŸ” Fetching infrastructure data...")
    gdf = get_infrastructure_gdf(bbox, tags)

    print(f"πŸ—Ί Rasterizing {len(gdf)} points...")
    raster, transform = rasterize_points(gdf, bbox, pixel_size)

    if distance:
        print("πŸ“ Computing distance transform...")
        empty = (raster == 0).astype(np.uint8)
        raster = distance_transform_edt(empty) * (111000 * pixel_size)  # meters

    print(f"πŸ’Ύ Saving raster to {output_path}...")
    save_raster(raster, transform, output_path)
    print("βœ… Done.")
772
+
773
+
774
def get_infrastructure(state: State):
    """Node: rasterize OSM infrastructure for the state's bbox.

    Writes 'infrastructure.tif' (binary raster) in the current directory.

    Returns:
        The state, so this node chains like the other workflow steps
        (the original returned None, unlike every sibling node).
    """
    generate_infrastructure_tif(state.bbox)
    return state
776
+
src/utils/state.py ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel, Field
2
+ from typing import List, Dict, Optional, Any
3
+
4
class State(BaseModel):
    """Enhanced state model for geospatial analysis workflow"""
    query: str = Field(description="The initial query sent by the user")
    # Mutable containers use default_factory so every State instance gets its
    # own list/dict; a shared `default=[]` literal invites cross-instance
    # aliasing and is the discouraged mutable-default pattern.
    tasks: List[str] = Field(default_factory=list, description="Detailed breakdown of the tasks")
    output_files: List[Dict[str, str]] = Field(default_factory=list, description="Generated files with metadata")
    bbox: Optional[List[float]] = Field(default=None, description="Bounding box [minx, miny, maxx, maxy]")
    place_name: Optional[str] = Field(default=None, description="Place name for analysis")
    working_directory: str = Field(default="output", description="Working directory for outputs")
    analysis_type: Optional[str] = Field(default=None, description="Type of analysis (flood, slope, etc.)")
    parameters: Dict[str, Any] = Field(default_factory=dict, description="Analysis parameters")
    error_log: List[str] = Field(default_factory=list, description="Error messages during processing")
    status: str = Field(default="initialized", description="Current processing status")
src/utils/test.py ADDED
@@ -0,0 +1,814 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel, Field
2
+ from typing import List, Dict, Optional, Any
3
+ import requests
4
+ import os
5
+ import gzip
6
+ import numpy as np
7
+ from PIL import Image
8
+ import struct
9
+ from pathlib import Path
10
+ import rasterio
11
+ from rasterio.transform import from_origin
12
+ from datetime import datetime, timedelta
13
+ from dateutil.relativedelta import relativedelta
14
+ import shutil
15
+ from tqdm import tqdm
16
+ import osmnx as ox
17
+ import geopandas as gpd
18
+ import pandas as pd
19
+ from shapely.geometry import box
20
+ from scipy.ndimage import distance_transform_edt
21
+ from rasterio.transform import from_bounds
22
+ from rasterio.crs import CRS
23
+ from geopy.geocoders import Nominatim
24
+ from whitebox import WhiteboxTools
25
+ from dotenv import load_dotenv
26
+
27
+ load_dotenv()
28
+
29
class State(BaseModel):
    """Enhanced state model for geospatial analysis workflow.

    Carries the user's query, the derived area of interest, all generated
    artifacts and error/status bookkeeping between the workflow nodes.
    """
    query: str = Field(description="The initial query sent by the user")
    # default_factory gives each instance its own fresh list/dict — the
    # documented pydantic idiom for mutable defaults (instead of default=[]).
    tasks: List[str] = Field(default_factory=list, description="Detailed breakdown of the tasks")
    output_files: List[Dict[str, str]] = Field(default_factory=list, description="Generated files with metadata")
    bbox: Optional[List[float]] = Field(default=None, description="Bounding box [minx, miny, maxx, maxy]")
    place_name: Optional[str] = Field(default=None, description="Place name for analysis")
    working_directory: str = Field(default="output", description="Working directory for outputs")
    analysis_type: Optional[str] = Field(default=None, description="Type of analysis (flood, slope, etc.)")
    parameters: Dict[str, Any] = Field(default_factory=dict, description="Analysis parameters")
    error_log: List[str] = Field(default_factory=list, description="Error messages during processing")
    status: str = Field(default="initialized", description="Current processing status")
41
+
42
# Initialize tools
# Module-level singletons shared by every node function below.
geolocator = Nominatim(user_agent="lulc-retriever")  # OSM Nominatim geocoder (rate-limited; 1 req/s policy)
wbt = WhiteboxTools()  # WhiteboxTools driver used for the hydrology rasters
wbt.set_verbose_mode(True)  # echo each tool's progress to stdout
wbt.set_compress_rasters(False)  # write uncompressed GeoTIFF outputs
47
+
48
def get_bbox(place, buffer=0.1):
    """Geocode *place* and return a (west, south, east, north) bounding box.

    Parameters
    ----------
    place : str
        Free-text place name understood by Nominatim.
    buffer : float, optional
        Half-width of the box in degrees around the geocoded point
        (default 0.1 degrees, roughly 10 km at the equator).

    Raises
    ------
    ValueError
        If Nominatim cannot resolve the place name.
    """
    location = geolocator.geocode(place)
    if location is None:
        raise ValueError(f"Could not geocode location: {place}")

    lat, lon = location.latitude, location.longitude
    return (lon - buffer, lat - buffer, lon + buffer, lat + buffer)
57
+
58
def download_srtm_hgt(lat, lon, output_dir):
    """Download one SRTM "skadi" tile (.hgt) for the 1-degree cell at (lat, lon).

    *lat*/*lon* are the integer coordinates of the tile's SW corner;
    *output_dir* is where the tile is stored.  Returns the local path to the
    .hgt file, or None when the download fails.  Already-present tiles are
    reused without re-downloading.
    """
    os.makedirs(output_dir, exist_ok=True)

    # Skadi naming: N/S + 2-digit lat, E/W + 3-digit lon, e.g. N12E077.hgt
    lat_str = f"N{lat:02d}" if lat >= 0 else f"S{abs(lat):02d}"
    lon_str = f"E{lon:03d}" if lon >= 0 else f"W{abs(lon):03d}"
    tile_name = f"{lat_str}{lon_str}.hgt"

    url = f"https://s3.amazonaws.com/elevation-tiles-prod/skadi/{lat_str}/{tile_name}.gz"
    output_path = os.path.join(output_dir, tile_name)

    if os.path.exists(output_path):
        return output_path

    gz_path = output_path + ".gz"
    try:
        print(f"Downloading {tile_name}...")
        # Stream to disk so large tiles never sit fully in memory; the timeout
        # keeps a stalled connection from hanging the whole workflow, and the
        # context manager guarantees the connection is released.
        with requests.get(url, stream=True, timeout=60) as response:
            response.raise_for_status()
            with open(gz_path, 'wb') as f:
                for chunk in response.iter_content(chunk_size=8192):
                    f.write(chunk)

        # Decompress incrementally instead of reading the whole file at once.
        with gzip.open(gz_path, 'rb') as f_in, open(output_path, 'wb') as f_out:
            shutil.copyfileobj(f_in, f_out)

        print(f"βœ… Downloaded: {tile_name}")
        return output_path

    except Exception as e:
        print(f"❌ Failed to download {tile_name}: {e}")
        return None

    finally:
        # Never leave a partial .gz behind (success or failure).
        if os.path.exists(gz_path):
            os.remove(gz_path)
95
+
96
def read_hgt_file(hgt_file):
    """Read an SRTM .hgt tile and return (elevation array, geotransform, size).

    The geotransform is GDAL-style:
    [origin_x, pixel_width, 0, origin_y, 0, -pixel_height].
    """
    # Infer the square grid size from the byte count (2 bytes per sample).
    n_bytes = os.path.getsize(hgt_file)
    if n_bytes == 1201 * 1201 * 2:      # SRTM1
        size = 1201
    elif n_bytes == 3601 * 3601 * 2:    # SRTM3
        size = 3601
    else:
        size = int(np.sqrt(n_bytes // 2))
        print(f"Auto-detected size: {size}x{size}")

    # The tile's SW corner is encoded in the filename, e.g. "N12E077.hgt".
    name = os.path.basename(hgt_file)
    lat = int(name[1:3]) if name.startswith('N') else -int(name[1:3])
    lon_token = name[3:7]
    lon = int(lon_token[1:]) if lon_token.startswith('E') else -int(lon_token[1:])

    # Samples are big-endian signed 16-bit integers, row-major from the NW corner.
    with open(hgt_file, 'rb') as fh:
        raw = fh.read()
    elevation_data = np.frombuffer(raw, dtype='>i2').reshape(size, size)

    # Rows/cols share edge samples with neighbouring tiles, hence size - 1.
    step = 1.0 / (size - 1)
    geotransform = [lon, step, 0, lat + 1, 0, -step]

    return elevation_data, geotransform, size
148
+
149
def clip_elevation_data(elevation_data, geotransform, size, bbox):
    """Clip a tile's elevation grid to *bbox* = (west, south, east, north).

    Returns the clipped array and an updated GDAL-style geotransform whose
    origin is the clip window's top-left corner.
    """
    west, south, east, north = bbox

    origin_x, px_w = geotransform[0], geotransform[1]
    origin_y, px_h = geotransform[3], geotransform[5]   # px_h is negative

    # Map the geographic corners to (row, col) pixel indices.
    col_min = int((west - origin_x) / px_w)
    row_min = int((origin_y - north) / abs(px_h))
    col_max = int((east - origin_x) / px_w)
    row_max = int((origin_y - south) / abs(px_h))

    # Clamp to the grid so a bbox larger than the tile cannot over-index.
    col_min = max(0, min(col_min, size - 1))
    row_min = max(0, min(row_min, size - 1))
    col_max = max(0, min(col_max, size - 1))
    row_max = max(0, min(row_max, size - 1))

    clipped = elevation_data[row_min:row_max + 1, col_min:col_max + 1]

    new_geotransform = [
        origin_x + col_min * px_w,   # shifted origin X
        px_w,
        0,
        origin_y + row_min * px_h,   # shifted origin Y (px_h < 0)
        0,
        px_h,
    ]

    return clipped, new_geotransform
186
+
187
def save_as_geotiff_basic(elevation_data, geotransform, output_file):
    """Save elevation data as a basic GeoTIFF (requires PIL)"""
    # The PIL 16-bit path only handles unsigned samples, so shift any
    # negative elevations up by a recorded offset rather than clipping them.
    lowest = np.min(elevation_data)
    if lowest < 0:
        offset = abs(lowest)
        shifted = elevation_data + offset
    else:
        offset = 0
        shifted = elevation_data

    shifted = shifted.astype(np.uint16)

    # 'I;16' = 16-bit unsigned integer pixel mode.
    Image.fromarray(shifted, mode='I;16').save(output_file)

    # PIL writes no geo tags, so georeferencing goes to a sidecar text file.
    metadata_file = output_file.replace('.tif', '_metadata.txt')
    with open(metadata_file, 'w') as f:
        f.write(f"GeoTransform: {geotransform}\n")
        f.write(f"Offset: {offset}\n")
        f.write(f"Original min value: {lowest}\n")
        f.write(f"Size: {shifted.shape}\n")

    return output_file, metadata_file
216
+
217
def get_dem_elevation_tif(state: State) -> State:
    """
    Download DEM data and save as TIF format in dem_files subdirectory.

    Downloads every SRTM tile intersecting state.bbox, clips each to the
    bbox, writes the first clipped tile as a GeoTIFF (mosaicking of
    multiple tiles is not implemented) and records the artifact on the
    state.  Returns the mutated state; on failure, status becomes "error"
    and the reason is appended to state.error_log.
    """
    try:
        state.status = "downloading_dem"

        # Validate required fields
        if not state.bbox:
            state.error_log.append("Bounding box is required for DEM download")
            state.status = "error"
            return state

        if not state.place_name:
            state.error_log.append("Place name is required for DEM download")
            state.status = "error"
            return state

        # Create working & sub-directories
        working_dir = Path(state.working_directory)
        dem_tiles_dir = working_dir / "dem_files" / "dem_tiles"
        dem_files_dir = working_dir / "dem_files"
        working_dir.mkdir(parents=True, exist_ok=True)
        dem_tiles_dir.mkdir(parents=True, exist_ok=True)
        dem_files_dir.mkdir(parents=True, exist_ok=True)

        state.parameters["dem_directory"] = str(dem_files_dir.resolve())

        west, south, east, north = state.bbox
        place_safe = state.place_name.replace(" ", "_").replace(",", "").replace(".", "")
        output_file = dem_files_dir / f"{place_safe}_dem.tif"

        print(f"πŸš€ Starting DEM download for {state.place_name}...")
        print(f"πŸ“ Bounding box: {state.bbox}")
        print(f"πŸ“ Output directory: {dem_files_dir}")

        # SRTM tiles are named by the floor of their SW corner.  Plain int()
        # truncates toward zero, which selects the wrong tile for negative
        # (southern/western hemisphere) coordinates, so floor explicitly.
        lat_range = range(int(np.floor(south)), int(np.floor(north)) + 1)
        lon_range = range(int(np.floor(west)), int(np.floor(east)) + 1)

        all_elevation_data = []
        all_geotransforms = []
        downloaded_tiles = []

        for lat in lat_range:
            for lon in lon_range:
                hgt_file = download_srtm_hgt(lat, lon, str(dem_tiles_dir))
                if hgt_file:
                    try:
                        elevation_data, geotransform, size = read_hgt_file(hgt_file)
                        clipped_data, clipped_geotransform = clip_elevation_data(
                            elevation_data, geotransform, size, state.bbox
                        )
                        all_elevation_data.append(clipped_data)
                        all_geotransforms.append(clipped_geotransform)
                        downloaded_tiles.append(os.path.basename(hgt_file))
                        print(f"βœ… Processed {os.path.basename(hgt_file)}: {clipped_data.shape}")
                    except Exception as e:
                        err = f"Error processing {hgt_file}: {e}"
                        state.error_log.append(err)
                        print(f"❌ {err}")

        if not all_elevation_data:
            state.error_log.append("No elevation data processed successfully")
            state.status = "error"
            return state

        print(f"\nπŸ”„ Processing {len(all_elevation_data)} elevation tiles...")

        if len(all_elevation_data) > 1:
            print("⚠️ Multiple tiles detected. Using first tile only (mosaicking not implemented).")

        final_data = all_elevation_data[0]
        final_geotransform = all_geotransforms[0]

        tif_file, metadata_file = save_as_geotiff_basic(
            final_data, final_geotransform, str(output_file)
        )

        # Update DEM with proper CRS
        update_dem(tif_file, state)

        min_elev = float(np.min(final_data))
        max_elev = float(np.max(final_data))
        mean_elev = float(np.mean(final_data))
        shape = final_data.shape

        state.output_files.append({
            "type": "dem",
            "format": "geotiff",
            "file_path": str(tif_file),
            "metadata_file": str(metadata_file),
            "min_elevation": min_elev,
            "max_elevation": max_elev,
            "mean_elevation": mean_elev,
            "data_shape": shape,
            "downloaded_tiles": downloaded_tiles,
            "bbox": state.bbox,
            "geotransform": final_geotransform
        })
        state.status = "dem_downloaded"

        print(f"\n🎯 Success! DEM saved to: {tif_file}")
        print(f"πŸ“Š Elevation stats: Min={min_elev}, Max={max_elev}, Mean={mean_elev:.1f} m")
        print(f"πŸ“ Data size: {shape}")
        return state

    except Exception as e:
        state.error_log.append(f"Unhandled error during DEM download: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
328
+
329
def update_dem(filepath, state):
    """Rewrite the DEM in place, stamping WGS84 and a bbox-derived transform.

    The PIL-written TIFF carries no geo tags, so this re-opens it and writes
    an explicit CRS plus an affine transform computed from state.bbox and the
    raster's actual shape.  (The previous hard-coded 0.0008333-degree pixel
    assumed one fixed tile resolution and mis-registered any clip whose size
    differed from that assumption.)
    """
    input_path = filepath
    output_path = filepath

    crs = "EPSG:4326"  # WGS84 Latitude/Longitude

    with rasterio.open(input_path) as src:
        profile = src.profile
        data = src.read(1)

    west, south, east, north = state.bbox
    height, width = data.shape
    # Derive per-pixel size from the clipped extent so the transform always
    # matches the data, regardless of source tile resolution.
    transform = from_origin(
        west,
        north,                      # top-left corner uses the north boundary
        (east - west) / width,      # pixel width in degrees
        (north - south) / height    # pixel height in degrees
    )

    profile.update({
        'crs': crs,
        'transform': transform
    })

    with rasterio.open(output_path, 'w', **profile) as dst:
        dst.write(data, 1)
353
+
354
def download_chirps_tif(date: datetime, out_dir):
    """Download one CHIRPS v2.0 daily precipitation GeoTIFF for *date*.

    Returns the local .tif path, or None when the server does not have the
    file (CHIRPS publishes with a few days' lag).  Already-downloaded days
    are reused.  The archive serves each day gzip-compressed under
    .../global_daily/tifs/p25/<year>/<filename>.gz — the previous URL had a
    corrupted "(unknown)" placeholder instead of the filename.
    """
    y, m, d = date.strftime("%Y"), date.strftime("%m"), date.strftime("%d")
    filename = f"chirps-v2.0.{y}.{m}.{d}.tif"
    url = f"https://data.chc.ucsb.edu/products/CHIRPS-2.0/global_daily/tifs/p25/{y}/{filename}.gz"

    gz_path = os.path.join(out_dir, filename + ".gz")
    tif_path = os.path.join(out_dir, filename)

    if os.path.exists(tif_path):
        print(f"βœ… Already downloaded: {filename}")
        return tif_path

    os.makedirs(out_dir, exist_ok=True)
    # Timeout keeps a stalled connection from blocking the batch loop.
    r = requests.get(url, stream=True, timeout=60)
    if r.status_code != 200:
        print(f"❌ Failed: {url}")
        return None

    with open(gz_path, "wb") as f:
        for chunk in r.iter_content(chunk_size=1024):
            if chunk:
                f.write(chunk)

    with gzip.open(gz_path, "rb") as f_in, open(tif_path, "wb") as f_out:
        shutil.copyfileobj(f_in, f_out)

    os.remove(gz_path)
    print(f"βœ… Downloaded and extracted: {tif_path}")
    return tif_path
384
+
385
def batch_download_chirps(start_date: str, end_date: str, out_dir):
    """Download CHIRPS daily rasters for every date in [start_date, end_date].

    Dates are "YYYY-MM-DD" strings.  CHIRPS publishes with a short lag, so
    dates newer than (today - 3 days) are skipped instead of attempted.
    """
    # file-level import only brings in datetime/timedelta, not timezone
    from datetime import timezone

    start = datetime.strptime(start_date, "%Y-%m-%d")
    end = datetime.strptime(end_date, "%Y-%m-%d")
    current = start
    # timezone-aware "now"; datetime.utcnow() is deprecated since Python 3.12
    today = datetime.now(timezone.utc).date()
    max_available = today - timedelta(days=3)

    while current <= end:
        if current.date() > max_available:
            print(f"⚠️ Skipping future/unavailable date: {current.strftime('%Y-%m-%d')}")
        else:
            download_chirps_tif(current, out_dir)
        current += timedelta(days=1)
399
+
400
def get_rainfall_data(state: State) -> State:
    """
    Download rainfall data and save in rainfall_data subdirectory.

    Fetches CHIRPS daily rasters for the 7-day window ending exactly one
    year before today (guaranteed to be published), records the files on
    the state, and returns the mutated state.
    """
    try:
        state.status = "downloading_rainfall"

        # All CHIRPS rasters land in <working_directory>/rainfall_data.
        rainfall_dir = Path(state.working_directory) / "rainfall_data"
        rainfall_dir.mkdir(parents=True, exist_ok=True)

        print("🌧️ Fetching rainfall data from same timeframe last year...")

        now = datetime.today()
        window_end = now - relativedelta(years=1)          # today, last year
        window_start = window_end - timedelta(days=7)      # one week earlier

        start_date = window_start.strftime('%Y-%m-%d')
        end_date = window_end.strftime('%Y-%m-%d')

        print(f"πŸ“… Start Date: {start_date}")
        print(f"πŸ“… End Date: {end_date}")

        batch_download_chirps(start_date, end_date, str(rainfall_dir))

        tif_files = list(rainfall_dir.glob("*.tif"))

        state.output_files.append({
            "type": "rainfall",
            "format": "geotiff",
            "directory": str(rainfall_dir),
            "file_count": len(tif_files),
            "date_range": f"{start_date} to {end_date}",
            "files": [str(f) for f in tif_files]
        })

        state.status = "rainfall_downloaded"
        print(f"βœ… Downloaded {len(tif_files)} rainfall files to {rainfall_dir}")
        return state

    except Exception as e:
        state.error_log.append(f"Error downloading rainfall data: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
449
+
450
def run_hydrology_generator(state: State) -> State:
    """
    Run hydrological analysis and save outputs in hydrology_outputs subdirectory.

    Reads the DEM recorded in state.output_files, derives slope, aspect,
    filled DEM, D8 flow direction, flow accumulation and a stream raster
    with WhiteboxTools, and records the artifact paths back on the state.
    Returns the mutated state; on any failure status becomes "error".
    """
    def _ensure(path: Path, label: str) -> None:
        # WhiteboxTools can return without producing output, so verify each
        # artifact exists on disk before the next step consumes it.
        if not path.exists():
            raise Exception(f"{label} not generated")

    try:
        state.status = "running_hydrology"

        # Locate the DEM produced by the download node.
        dem_file = None
        for output in state.output_files:
            if output.get("type") == "dem":
                dem_file = output.get("file_path")
                break

        if not dem_file:
            state.error_log.append("No DEM file found in state for hydrology analysis")
            state.status = "error"
            return state

        # Create hydrology outputs directory
        working_dir = Path(state.working_directory)
        hydrology_dir = working_dir / "hydrology_outputs"
        hydrology_dir.mkdir(parents=True, exist_ok=True)

        # Ensure DEM exists
        dem_path = Path(dem_file)
        if not dem_path.exists():
            state.error_log.append(f"DEM file not found at {dem_path}")
            state.status = "error"
            return state

        # Define output paths
        filled_dem = hydrology_dir / "dem_filled.tif"
        flow_pointer = hydrology_dir / "flow_dir.tif"
        flow_accum = hydrology_dir / "flow_acc.tif"
        stream_raster = hydrology_dir / "streams.tif"
        slope_path = hydrology_dir / "slope.tif"
        aspect_path = hydrology_dir / "aspect.tif"

        print(f"πŸ“ Hydrology output directory: {hydrology_dir}")
        print("πŸš€ Starting hydrological analysis...")

        print("πŸ“ Generating Slope...")
        wbt.slope(dem=str(dem_path), output=str(slope_path), zfactor=1.0)
        _ensure(slope_path, "Slope file")

        print("🧭 Generating Aspect...")
        wbt.aspect(dem=str(dem_path), output=str(aspect_path))
        _ensure(aspect_path, "Aspect file")

        print("πŸ“₯ Running Fill Depressions...")
        wbt.fill_depressions(dem=str(dem_path), output=str(filled_dem))
        _ensure(filled_dem, "Filled DEM")

        print("πŸ“ˆ Calculating Flow Direction...")
        wbt.d8_pointer(dem=str(filled_dem), output=str(flow_pointer))
        _ensure(flow_pointer, "Flow direction file")

        print("🌊 Flow Accumulation...")
        wbt.d8_flow_accumulation(i=str(filled_dem), output=str(flow_accum), out_type="cells")
        _ensure(flow_accum, "Flow accumulation file")

        print("🧡 Extracting Streams...")
        wbt.extract_streams(flow_accum=str(flow_accum), output=str(stream_raster), threshold=100)
        _ensure(stream_raster, "Stream raster")

        hydrology_outputs = {
            "filled_dem": str(filled_dem),
            "flow_dir": str(flow_pointer),
            "flow_acc": str(flow_accum),
            "streams": str(stream_raster),
            "slope": str(slope_path),
            "aspect": str(aspect_path)
        }

        state.output_files.append({
            "type": "hydrology",
            "format": "geotiff",
            "directory": str(hydrology_dir),
            "outputs": hydrology_outputs
        })

        state.status = "hydrology_completed"
        print("βœ… All hydrological outputs generated successfully.")
        return state

    except Exception as e:
        state.error_log.append(f"Error in hydrology analysis: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
547
+
548
def tidal_risk_from_osm(state: State) -> State:
    """
    Generate tidal risk zones and save in tidal_risk subdirectory.

    Buffers OSM coastline and water features by 1 km, dissolves the result
    into a single polygon, and stores it as GeoJSON on the state.
    """
    try:
        state.status = "generating_tidal_risk"

        tidal_dir = Path(state.working_directory) / "tidal_risk"
        tidal_dir.mkdir(parents=True, exist_ok=True)

        place = state.place_name
        buffer_dist = 1000  # metres around any coastline/water feature
        safe_name = place.replace(' ', '_').replace(',', '')
        output_geojson = tidal_dir / f"{safe_name}_tidal_risk.geojson"

        print(f"🌍 Fetching OSM water + coastline for {place}")

        coast = ox.features_from_place(place, tags={"natural": "coastline"})
        water = ox.features_from_place(place, tags={"natural": "water"})

        # Work in Web Mercator so the buffer distance is in metres.
        coast = coast.to_crs("EPSG:3857")
        water = water.to_crs("EPSG:3857")
        combined = gpd.GeoDataFrame(pd.concat([coast, water], ignore_index=True), crs=coast.crs)

        print(f"🧱 Found {len(combined)} features. Buffering...")
        buffered = combined.buffer(buffer_dist)
        # Dissolve overlapping buffers into one geometry, then back to lon/lat.
        risk_gdf = gpd.GeoDataFrame(geometry=buffered, crs="EPSG:3857").dissolve()
        risk_gdf = risk_gdf.to_crs("EPSG:4326")

        risk_gdf.to_file(output_geojson, driver="GeoJSON")

        state.output_files.append({
            "type": "tidal_risk",
            "format": "geojson",
            "file_path": str(output_geojson),
            "buffer_distance": buffer_dist,
            "feature_count": len(combined)
        })

        state.status = "tidal_risk_completed"
        print(f"βœ… Saved Tidal Risk GeoJSON: {output_geojson}")
        return state

    except Exception as e:
        state.error_log.append(f"Error generating tidal risk: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
600
+
601
def get_healthcare_data(bbox, tags):
    """Fetch healthcare features from OSM inside *bbox* and reduce each to a point.

    *bbox* is (minx, miny, maxx, maxy) in lon/lat; *tags* is an OSM tag
    filter dict.  Returns a GeoDataFrame of point geometries in EPSG:4326.
    """
    footprint = box(*bbox)
    features = ox.features_from_polygon(footprint, tags=tags)
    features = features.to_crs("EPSG:4326")
    # NOTE(review): .centroid on a geographic CRS is only approximate; it is
    # presumably acceptable here since features are building-scale — confirm.
    features["geometry"] = features.centroid
    return features
609
+
610
def rasterize_healthcare_points(bbox, points_gdf, pixel_size=0.0005):
    """Burn healthcare point geometries into a binary grid over *bbox*.

    Returns (raster, transform) where raster is uint8 with 1 in any cell
    containing at least one point.
    """
    minx, miny, maxx, maxy = bbox
    n_cols = int((maxx - minx) / pixel_size)
    n_rows = int((maxy - miny) / pixel_size)
    transform = from_bounds(minx, miny, maxx, maxy, n_cols, n_rows)
    world_to_pixel = ~transform  # world (x, y) -> fractional (col, row)

    grid = np.zeros((n_rows, n_cols), dtype=np.uint8)
    for geom in points_gdf.geometry:
        c, r = world_to_pixel * (geom.x, geom.y)
        c, r = int(c), int(r)
        if 0 <= r < n_rows and 0 <= c < n_cols:
            grid[r, c] = 1
    return grid, transform
624
+
625
def compute_distance_transform(binary_raster, pixel_size_deg):
    """Approximate distance (metres) from every cell to the nearest 1-cell.

    Cells holding 1 mark facilities.  The Euclidean distance transform is
    computed in pixel units and scaled by ~111 km per degree of pixel size.
    """
    empty_cells = (binary_raster == 0).astype(np.uint8)
    pixel_distances = distance_transform_edt(empty_cells)
    # one degree of latitude is ~111,000 m, converting pixels to metres
    return pixel_distances * (111000 * pixel_size_deg)
631
+
632
def save_distance_raster(distance_raster, transform, output_path, crs="EPSG:4326"):
    """Write a single-band distance raster to a GeoTIFF at *output_path*."""
    n_rows, n_cols = distance_raster.shape
    profile = {
        "driver": "GTiff",
        "height": n_rows,
        "width": n_cols,
        "count": 1,
        "dtype": distance_raster.dtype,
        "crs": CRS.from_string(crs),
        "transform": transform,
    }
    with rasterio.open(output_path, "w", **profile) as dst:
        dst.write(distance_raster, 1)
646
+
647
def generate_distance_to_healthcare(state: State) -> State:
    """
    Generate distance raster to healthcare facilities in healthcare_analysis subdirectory.

    Pipeline: fetch OSM facilities for state.bbox, rasterize them, compute a
    distance-in-metres surface, save it, and record the artifact on the state.
    """
    try:
        state.status = "generating_healthcare_distance"

        healthcare_dir = Path(state.working_directory) / "healthcare_analysis"
        healthcare_dir.mkdir(parents=True, exist_ok=True)
        output_path = healthcare_dir / "distance_to_healthcare.tif"

        print("πŸ” Fetching healthcare data from OpenStreetMap...")
        facility_tags = {"amenity": ["hospital", "clinic", "doctors", "pharmacy"]}
        facilities = get_healthcare_data(state.bbox, facility_tags)

        print(f"πŸ—Ί Rasterizing {len(facilities)} healthcare points...")
        cell_deg = 0.0005
        presence_grid, transform = rasterize_healthcare_points(state.bbox, facilities, cell_deg)

        print("πŸ“ Computing distance transform...")
        distance_grid = compute_distance_transform(presence_grid, cell_deg)

        print(f"πŸ’Ύ Saving to {output_path}...")
        save_distance_raster(distance_grid, transform, str(output_path))

        state.output_files.append({
            "type": "healthcare_distance",
            "format": "geotiff",
            "file_path": str(output_path),
            "healthcare_count": len(facilities),
            "pixel_size": cell_deg
        })

        state.status = "healthcare_distance_completed"
        print("βœ… Done! Distance raster generated.")
        return state

    except Exception as e:
        state.error_log.append(f"Error generating healthcare distance: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
692
+
693
+
694
+
695
+
696
+ import osmnx as ox
697
+ import geopandas as gpd
698
+ from shapely.geometry import box
699
+ import numpy as np
700
+ import rasterio
701
+ from rasterio.transform import from_bounds
702
+ from rasterio.crs import CRS
703
+ from scipy.ndimage import distance_transform_edt
704
+
705
+
706
def get_infrastructure_gdf(bbox, tags):
    """Fetch OSM features matching *tags* inside *bbox*, reduced to points.

    Note: mutates global osmnx settings (mirror endpoint + 60 s timeout)
    as a side effect for all later OSM calls in the process.
    """
    # Community Overpass mirror with a generous timeout.
    ox.settings.overpass_endpoint = "https://overpass.kumi.systems/api/interpreter"
    ox.settings.timeout = 60

    features = ox.features_from_polygon(box(*bbox), tags=tags)
    features = features.to_crs("EPSG:4326")
    # Collapse lines/polygons to centroids so they rasterize as points.
    features["geometry"] = features.centroid
    return features
716
+
717
+
718
def rasterize_points(gdf, bbox, pixel_size=0.0005):
    """Burn each point geometry in *gdf* into a uint8 presence grid over *bbox*."""
    minx, miny, maxx, maxy = bbox
    cols = int((maxx - minx) / pixel_size)
    rows = int((maxy - miny) / pixel_size)
    transform = from_bounds(minx, miny, maxx, maxy, cols, rows)
    to_pixel = ~transform  # world (x, y) -> fractional (col, row)

    grid = np.zeros((rows, cols), dtype=np.uint8)
    for geom in gdf.geometry:
        x_idx, y_idx = to_pixel * (geom.x, geom.y)
        x_idx, y_idx = int(x_idx), int(y_idx)
        if 0 <= y_idx < rows and 0 <= x_idx < cols:
            grid[y_idx, x_idx] = 1
    return grid, transform
731
+
732
+
733
def save_raster(raster, transform, output_path, crs="EPSG:4326"):
    """Persist a single-band array as a GeoTIFF with the given transform/CRS."""
    height, width = raster.shape
    writer_kwargs = dict(
        driver="GTiff",
        height=height,
        width=width,
        count=1,
        dtype=raster.dtype,
        crs=CRS.from_string(crs),
        transform=transform,
    )
    with rasterio.open(output_path, "w", **writer_kwargs) as dst:
        dst.write(raster, 1)
746
+
747
+
748
def generate_infrastructure_tif(bbox, output_path="infrastructure.tif", pixel_size=0.0005, distance=False):
    """
    Generate a binary or distance-based infrastructure raster.

    With distance=False the raster holds 1 where infrastructure exists;
    with distance=True it holds metres to the nearest infrastructure cell.
    """
    # Broad OSM categories that together approximate "built infrastructure".
    infra_tags = {
        "highway": True,
        "building": True,
        "bridge": True,
        "railway": True
    }

    print("πŸ” Fetching infrastructure data...")
    features = get_infrastructure_gdf(bbox, infra_tags)

    print(f"πŸ—Ί Rasterizing {len(features)} points...")
    grid, transform = rasterize_points(features, bbox, pixel_size)

    if distance:
        print("πŸ“ Computing distance transform...")
        # ~111 km per degree converts the pixel spacing to metres.
        off_cells = (grid == 0).astype(np.uint8)
        grid = distance_transform_edt(off_cells) * (111000 * pixel_size)  # meters

    print(f"πŸ’Ύ Saving raster to {output_path}...")
    save_raster(grid, transform, output_path)
    print("βœ… Done.")
774
+
775
+
776
def get_infrastructure(state: State) -> State:
    """Node wrapper: build the infrastructure raster for state.bbox.

    Made consistent with the other workflow nodes: writes into a
    subdirectory of state.working_directory (previously dumped
    "infrastructure.tif" into the current working directory), records the
    artifact in state.output_files, and returns the mutated state instead
    of None so the graph can keep threading it.
    """
    try:
        state.status = "generating_infrastructure"

        infra_dir = Path(state.working_directory) / "infrastructure"
        infra_dir.mkdir(parents=True, exist_ok=True)
        output_path = infra_dir / "infrastructure.tif"

        generate_infrastructure_tif(state.bbox, output_path=str(output_path))

        state.output_files.append({
            "type": "infrastructure",
            "format": "geotiff",
            "file_path": str(output_path)
        })
        state.status = "infrastructure_completed"
        return state

    except Exception as e:
        state.error_log.append(f"Error generating infrastructure raster: {e}")
        state.status = "error"
        print(f"❌ {e}")
        return state
778
+
779
# Prompt template for the planner LLM: instructs it to decompose a user's
# geospatial query into an ordered, tool-mapped task list.
# NOTE(review): the tool names listed inside the prompt ("run_hydrolysis_tool",
# "OSM_retriever", "visualize_geospatial_file") do not match the functions
# defined in this module (run_hydrology_generator, get_infrastructure, ...) —
# confirm against the names actually registered with the agent before relying
# on this prompt.  The string itself is runtime behavior and is left unchanged.
section_breakdown_template = '''You are a Geospatial AI Agent with expertise in environmental modeling, GIS, and spatial data processing.
Your job is to break down high-level geospatial analysis queries into a structured list of tasks, with clear descriptions and suggested tools (if any).

Each task must be:

Self-contained and descriptive

Ordered for execution

Mapped to an appropriate tool (if known)

Ready to be passed to a task executor agent

πŸ—‚οΈ Input Example
User Goal:

Analyze flood vulnerability for Chennai using DEM, rainfall, and infrastructure data.



Tools available:

get_dem_elevation_tif: Downloads DEM data which is base for any task

run_hydrolysis_tool: Computes slope, flow direction, stream network


get_rainfall_data: Retrieves rainfall from satellite or IMD data

OSM_retriever: Downloads Infrastructure,road data for the given place

visualize_geospatial_file: Creates maps from raster/vector layers

llm: Used for reasoning, summarization, or decision-making

'''
src/utils/tools.py ADDED
File without changes