Spaces:
Sleeping
Sleeping
| from fastapi import FastAPI, HTTPException | |
| from pydantic import BaseModel | |
| from fastapi.middleware.cors import CORSMiddleware | |
| import pystac_client | |
| import stackstac | |
| import xarray as xr | |
| import numpy as np | |
| import rasterio.features | |
| from shapely.geometry import shape, mapping, box, Point | |
| from datetime import date, timedelta | |
| from typing import List, Optional | |
| from skimage import morphology | |
# FastAPI application instance for the GeoView engine.
app = FastAPI(title="GeoView Autonomous Engine")
# Fully open CORS policy so browser map clients can call the API directly.
# NOTE(review): allow_origins=["*"] combined with allow_credentials=True is
# disallowed by the CORS spec and handled specially by Starlette — confirm
# whether credentials are actually needed, and tighten for production.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
class DetectRequest(BaseModel):
    """Request payload for change detection.

    The area of interest is given either as an explicit ``bbox`` or as a
    ``lat``/``lon`` point expanded by ``radius``; ``mode`` selects which
    detection pipeline runs.
    """
    # Point-of-interest centre in degrees (used when bbox is absent).
    lat: Optional[float] = None
    lon: Optional[float] = None
    # Half-width of the square AOI around (lat, lon), in degrees.
    radius: Optional[float] = 0.04
    # Explicit AOI as [min_lon, min_lat, max_lon, max_lat]; takes
    # precedence over lat/lon when provided.
    bbox: Optional[List[float]] = None
    mode: Optional[str] = "mining"  # mining, environment, autonomous, artisanal
# Digital Earth Africa STAC catalog endpoint (source of Sentinel-2 L2A items).
STAC_URL = "https://explorer.digitalearth.africa/stac"
def get_satellite_data(bbox, time_range, collection="s2_l2a", assets=None, resolution=0.0001):
    """Search the STAC catalog and stack matching scenes into one cube.

    Queries ``STAC_URL`` for up to 30 items of *collection* intersecting
    *bbox* within *time_range*, then builds a lazy ``stackstac`` array in
    EPSG:4326 at the requested *resolution*.

    Returns the stacked DataArray, or ``None`` when nothing matched or any
    step failed (best-effort: errors are printed, never raised).
    """
    try:
        catalog = pystac_client.Client.open(STAC_URL)
        found = catalog.search(
            collections=[collection],
            bbox=bbox,
            datetime=time_range,
            # 30 scenes keeps enough temporal density for monthly binning
            limit=30,
        ).item_collection()
        if not found:
            return None
        # Lazily assemble all matching scenes into a (time, band, y, x) cube
        return stackstac.stack(
            found,
            assets=assets,
            bounds_latlon=bbox,
            epsg=4326,
            resolution=resolution,
            chunksize=1024,
        )
    except Exception as e:
        # Callers treat None as "no data available for this sector"
        print(f"STAC Fetch Error ({collection}): {e}")
        return None
def calculate_eds(area_sqm, change_magnitude, type_severity):
    """
    Environmental Damage Score (0-100).

    Weighted sum of three capped components: footprint area (max 30 pts,
    saturating at 50,000 sqm), spectral-change magnitude (max 40 pts,
    saturating at 0.3 — subtle changes matter after clean stacking) and
    event-type severity (max 30 pts), clamped to 100.
    """
    components = (
        min(area_sqm / 50000, 1.0) * 30,        # area contribution
        min(change_magnitude / 0.3, 1.0) * 40,  # magnitude contribution
        type_severity * 30,                     # severity contribution
    )
    return min(int(sum(components)), 100)
def get_eds_category(score):
    """Map a numeric EDS score to its human-readable severity band."""
    bands = ((20, "Minor"), (50, "Moderate"), (75, "Severe"))
    for upper_bound, label in bands:
        if score < upper_bound:
            return label
    return "Critical"
def analyze_sector(bbox, mode="mining", sector_id="MAIN"):
    """
    Detect land-cover change inside *bbox* and return GeoJSON features.

    Compares a deep 2020 baseline composite (Sentinel-2 L2A) against the
    trailing 120-day window. Behaviour depends on *mode*:

    - ``"artisanal"``:   max/min compositing straight off the time stack,
      ultra-low thresholds, NO morphological cleaning (high recall, noisy).
    - ``"mining"``:      industrial masks (legacy / emerging / active).
    - ``"environment"``: burn / drought / flood masks.
    - ``"autonomous"``:  both the mining AND environment detectors.

    Returns a list of GeoJSON Feature dicts; empty when imagery is
    unavailable for either epoch.
    """
    today = date.today()

    # Deep baseline: the whole of 2020 (assumed stable past)
    baseline_range = "2020-01-01/2020-12-31"
    # Current epoch: trailing 120 days (~4 months)
    current_range = f"{(today - timedelta(days=120)).isoformat()}/{today.isoformat()}"

    # SWIR bands (B11/B12) included for bare-soil and burn indices
    s2_assets = ["B02", "B03", "B04", "B08", "B11", "B12"]
    baseline_s2 = get_satellite_data(bbox, baseline_range, "s2_l2a", s2_assets)
    current_s2 = get_satellite_data(bbox, current_range, "s2_l2a", s2_assets)
    if baseline_s2 is None or current_s2 is None:
        return []

    # --- COMPOSITING ENGINE ---
    # Baseline: simple median over 2020
    base_opt = baseline_s2.median(dim="time").compute()

    def calc_indices(ds):
        """Return (NDVI, BSI, MNDWI, NBR) spectral indices for a stack."""
        # NDVI (vegetation): (NIR - Red) / (NIR + Red)
        ndvi = (ds.sel(band="B08") - ds.sel(band="B04")) / (ds.sel(band="B08") + ds.sel(band="B04") + 1e-8)
        # BSI (bare soil)
        bsi = ((ds.sel(band="B11") + ds.sel(band="B04")) - (ds.sel(band="B08") + ds.sel(band="B02"))) / \
              ((ds.sel(band="B11") + ds.sel(band="B04")) + (ds.sel(band="B08") + ds.sel(band="B02")) + 1e-8)
        # MNDWI (water): (Green - SWIR1) / (Green + SWIR1)
        mndwi = (ds.sel(band="B03") - ds.sel(band="B11")) / (ds.sel(band="B03") + ds.sel(band="B11") + 1e-8)
        # NBR (burn ratio): (NIR - SWIR2) / (NIR + SWIR2)
        nbr = (ds.sel(band="B08") - ds.sel(band="B12")) / (ds.sel(band="B08") + ds.sel(band="B12") + 1e-8)
        return ndvi, bsi, mndwi, nbr

    base_ndvi, base_bsi, base_mndwi, base_nbr = calc_indices(base_opt)

    features = []
    if mode == "artisanal":
        # === MODE 1: ARTISANAL (high recall / zero-miss) ===
        # No temporal median suppression: min/max composites straight from
        # the time stack so even single-scene events survive.
        full_ndvi, full_bsi, full_mndwi, _ = calc_indices(current_s2)
        curr_ndvi = full_ndvi.min(dim="time").compute()    # any vegetation loss
        curr_bsi = full_bsi.max(dim="time").compute()      # any soil exposure
        curr_mndwi = full_mndwi.max(dim="time").compute()  # any water exposure

        # Texture confirmation: local deviation from a 3x3 rolling mean
        ndvi_mean = curr_ndvi.rolling(x=3, y=3, center=True).mean()
        ndvi_texture = abs(curr_ndvi - ndvi_mean)

        # Ultra-low thresholds (pixel-level sensitivity)
        ndvi_loss = (base_ndvi - curr_ndvi) > 0.04
        bare_soil = curr_bsi > 0.015
        wet_pit = curr_mndwi > 0.02
        final_mask_xr = ndvi_loss & (bare_soil | wet_pit) & (ndvi_texture > 0.015)

        master_mask = final_mask_xr.values.astype(np.uint8)
        # IMPORTANT: no morphological cleaning (opening/closing/erosion) —
        # artisanal pits are noise-like and must be preserved.
        master_mask[master_mask == 1] = 10  # special class id for artisanal
        # Transform reference: the baseline composite shares bbox/resolution,
        # so it stands in for a single "current" composite object.
        curr_opt = base_opt
    else:
        # === MODE 2: INDUSTRIAL / ENVIRONMENTAL (standard) ===
        # Monthly median compositing suppresses clouds and transient noise
        monthly_stacks = current_s2.resample(time="1MS").median(dim="time")
        curr_opt = monthly_stacks.median(dim="time").compute()
        curr_ndvi, curr_bsi, curr_mndwi, curr_nbr = calc_indices(curr_opt)
        master_mask = np.zeros_like(curr_ndvi.values, dtype=np.uint8)

        # BUGFIX: these were an if/elif chain, so "autonomous" only ever ran
        # the mining branch and the environment detector was unreachable.
        # Both detectors now run independently in autonomous mode.
        if mode in ("mining", "autonomous"):
            active_mask = ((base_ndvi - curr_ndvi) > 0.15) & (curr_bsi > 0.05)
            emerging_mask = (curr_mndwi > 0.05) & (base_mndwi < -0.05)
            legacy_mask = (base_bsi > 0.1) & (curr_bsi > 0.1) & (curr_ndvi < 0.3)
            master_mask[legacy_mask.values] = 1
            master_mask[emerging_mask.values] = 2
            master_mask[active_mask.values] = 3
        if mode in ("environment", "autonomous"):
            burn_mask = (base_nbr - curr_nbr) > 0.2
            drought_mask = ((base_ndvi - curr_ndvi) > 0.1) & ((base_ndvi - curr_ndvi) < 0.3) & (curr_bsi < 0.25)
            flood_mask = (curr_mndwi > 0.15) & (base_mndwi < -0.15)
            # NOTE: written last, so in autonomous mode environment classes
            # overwrite any overlapping mining pixels.
            master_mask[drought_mask.values] = 4
            master_mask[burn_mask.values] = 5
            master_mask[flood_mask.values] = 6

        # Standard cleaning: morphological opening removes 1-px speckle
        master_mask = morphology.opening(master_mask, morphology.disk(1))

    # --- FEATURE EXTRACTION ---
    # class id -> (type, likelihood_score, type_severity, change_magnitude)
    class_table = {
        10: ("artisanal_mining", 80, 0.9, 0.2),
        1: ("legacy_mining", 85, 0.6, 0.1),
        2: ("emerging_mining", 75, 0.8, 0.3),
        3: ("active_mining", 92, 1.0, 0.5),
        4: ("drought", 65, 0.4, 0.15),
        5: ("wildfire", 88, 0.9, 0.6),
        6: ("flooding", 80, 0.7, 0.4),
    }
    # Minimum polygon area in square degrees; artisanal mode allows
    # micro-polygons smaller than one pixel.
    min_area = 0.000000005 if mode == "artisanal" else 0.00000005

    transform = curr_opt.transform
    for geom, val in rasterio.features.shapes(master_mask, transform=transform):
        val = int(val)
        if val == 0 or val not in class_table:
            continue
        poly_shape = shape(geom)
        if poly_shape.area < min_area:
            continue
        # Rough degrees -> metres conversion (~111 km per degree)
        area_sqm = poly_shape.area * 111000 * 111000

        change_type, likelihood, type_severity, change_magnitude = class_table[val]
        props = {
            "detected_on": today.isoformat(),
            "area_sqm": int(area_sqm),
            "baseline_year": 2020,
            "sector_id": sector_id,
            "type": change_type,
            "likelihood_score": likelihood,
        }
        if val == 10:
            # Deliberately biased toward recall: small pits score high
            props["detection_bias"] = "high_recall"

        eds = calculate_eds(area_sqm, change_magnitude, type_severity)
        props["eds_score"] = eds
        props["eds_category"] = get_eds_category(eds)
        props["change_magnitude"] = round(change_magnitude, 2)
        features.append({
            "type": "Feature",
            "properties": props,
            "geometry": geom,
        })
    return features
async def detect_change(request: DetectRequest):
    """
    Run change detection for the requested AOI and return a GeoJSON
    FeatureCollection.

    NOTE(review): no route decorator is visible in this chunk — confirm the
    function is registered with the app (e.g. ``@app.post("/detect")``).

    Resolves the AOI from ``request.bbox`` or ``lat``/``lon`` + ``radius``.
    In "autonomous" mode the bbox is split into three vertical strips
    scanned independently; otherwise a single scan runs. Unexpected
    failures are reported in-band as an empty collection with an "error"
    field (status 200); missing-AOI validation raises a proper 400.
    """
    try:
        # Resolve the area of interest
        if request.bbox:
            bbox = request.bbox
        elif request.lat is not None and request.lon is not None:
            # Guard against an explicit null radius in the payload
            radius = request.radius if request.radius is not None else 0.04
            bbox = [
                request.lon - radius,
                request.lat - radius,
                request.lon + radius,
                request.lat + radius
            ]
        else:
            raise HTTPException(status_code=400, detail="Provide bbox or lat/lon")

        all_features = []
        sectors_meta = []
        if request.mode == "autonomous":
            # Split BBOX into 3 vertical strips and scan each sector
            min_lon, min_lat, max_lon, max_lat = bbox
            width = max_lon - min_lon
            strip_width = width / 3
            sector_configs = [
                {"id": "A", "name": "Sector Alpha", "bbox": [min_lon, min_lat, min_lon + strip_width, max_lat]},
                {"id": "B", "name": "Sector Bravo", "bbox": [min_lon + strip_width, min_lat, min_lon + (2*strip_width), max_lat]},
                {"id": "C", "name": "Sector Charlie", "bbox": [min_lon + (2*strip_width), min_lat, max_lon, max_lat]}
            ]
            for sec in sector_configs:
                sec_features = analyze_sector(sec["bbox"], mode="autonomous", sector_id=sec["id"])
                all_features.extend(sec_features)
                sectors_meta.append({
                    "id": sec["id"],
                    "name": sec["name"],
                    "bbox": sec["bbox"],
                    "status": "complete",
                    "feature_count": len(sec_features)
                })
        else:
            # Single scan (supports artisanal)
            all_features = analyze_sector(bbox, mode=request.mode)

        metadata = {
            "period": "2020 Baseline vs 120d Window",
            "sensors": ["Sentinel-2 L2A"],
            "mode": request.mode
        }
        if request.mode == "artisanal":
            metadata["period"] = "2020 Baseline vs Max-Change Composite (No Smoothing)"
            metadata["warning"] = "High false-positive rate — artisanal sensitivity mode enabled"
        return {
            "type": "FeatureCollection",
            "features": all_features,
            "sectors": sectors_meta,
            "metadata": metadata
        }
    except HTTPException:
        # BUGFIX: previously swallowed by the generic handler below, which
        # turned the 400 validation error into a 200 error-collection.
        # Re-raise so FastAPI produces the proper HTTP status.
        raise
    except Exception as e:
        import traceback
        traceback.print_exc()
        # Best-effort in-band error report keeps map clients functional
        return {"type": "FeatureCollection", "features": [], "error": str(e)}
def health_check():
    """Liveness payload: engine banner plus deployed version string.

    NOTE(review): no route decorator is visible in this chunk — confirm it
    is registered with the app (e.g. ``@app.get("/")``).
    """
    return {
        "status": "GeoView Autonomous Engine Online",
        "version": "3.3.0-artisanal-mode",
    }