|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| import os
|
| import json
|
| import math
|
| from pathlib import Path
|
|
|
| import requests
|
| import ee
|
| import joblib
|
| import numpy as np
|
| import pandas as pd
|
| import matplotlib.pyplot as plt
|
| import gradio as gr
|
|
|
| from huggingface_hub import hf_hub_download
|
|
|
| import geopandas as gpd
|
| from shapely.geometry import box
|
|
|
| import folium
|
| from folium.plugins import Draw, LocateControl
|
| from branca.colormap import linear
|
|
|
|
|
|
|
|
|
|
|
# Hugging Face Hub location of the pretrained model artifacts.
# All three values can be overridden via environment variables, which is
# useful for pointing the app at alternative model versions without
# code changes.
HF_MODEL_REPO = os.environ.get(
    "HF_MODEL_REPO",
    "IWMIHQ/soil-moisture-sensor-optimizer-model",
)

# Pickled ExtraTrees regressor trained on Sentinel-1 soil-moisture points.
HF_MODEL_FILE = os.environ.get(
    "HF_MODEL_FILE",
    "extratrees_s1_soil_moisture_points.pkl",
)

# Plain-text file listing the model's feature column names, one per line.
HF_FEATURES_FILE = os.environ.get(
    "HF_FEATURES_FILE",
    "extratrees_s1_soil_moisture_features.txt",
)

# Bundled example AOI polygon used by the "Load example AOI" button.
EXAMPLE_AOI_PATH = "examples/example_field.geojson"
|
|
|
|
|
def load_model_and_features():
    """
    Download the ExtraTrees model + feature list from a Hugging Face
    model repo, then load them into memory.

    Returns:
        tuple: ``(model, feature_cols)`` where ``model`` is the unpickled
        scikit-learn estimator and ``feature_cols`` is the ordered list of
        feature column names expected by ``model.predict``.

    Raises:
        RuntimeError: if either artifact cannot be downloaded.
    """
    try:
        model_path = hf_hub_download(
            repo_id=HF_MODEL_REPO,
            filename=HF_MODEL_FILE,
            repo_type="model",
        )
        features_path = hf_hub_download(
            repo_id=HF_MODEL_REPO,
            filename=HF_FEATURES_FILE,
            repo_type="model",
        )
    except Exception as e:
        # Re-raise with full context (chained) so a misconfigured repo or
        # env var is immediately diagnosable from the traceback.
        raise RuntimeError(
            "Could not download model files from HF Hub.\n"
            f"Repo: {HF_MODEL_REPO}\n"
            f"Model file: {HF_MODEL_FILE}\n"
            f"Features file: {HF_FEATURES_FILE}\n"
            f"Original error: {e}"
        ) from e

    model = joblib.load(model_path)
    # One feature name per line; skip blanks and surrounding whitespace.
    with open(features_path, "r") as f:
        feature_cols = [ln.strip() for ln in f if ln.strip()]

    # NOTE: replaced mojibake-garbled emoji status markers (which broke the
    # string literals in the original) with encoding-safe ASCII tags.
    print(f"[OK] Loaded model from {HF_MODEL_REPO}/{HF_MODEL_FILE}")
    print(f"[OK] Loaded {len(feature_cols)} feature names.")
    return model, feature_cols
|
|
|
|
|
|
|
# Load once at import time so every Gradio request reuses the same model.
MODEL, FEATURE_COLS = load_model_and_features()
|
|
|
|
|
|
|
|
|
|
|
|
|
def make_drawer_map_html(center_lat: float = -23.0,
                         center_lon: float = 30.0,
                         zoom: int = 7) -> str:
    """
    Build the AOI-drawing folium map and return it as an HTML string.

    The map carries:
      - OpenStreetMap basemap (shown by default)
      - Esri World Imagery (satellite) basemap
      - a polygon-only Draw control with GeoJSON export
      - a "locate me" control that auto-switches to the satellite
        basemap once the browser reports a position.
    """
    fmap = folium.Map(
        location=[center_lat, center_lon],
        zoom_start=zoom,
        tiles=None,
        control_scale=True,
    )

    base_osm = folium.TileLayer(
        "OpenStreetMap",
        name="OpenStreetMap",
        control=True,
        show=True,
    ).add_to(fmap)

    base_sat = folium.TileLayer(
        tiles=(
            "https://services.arcgisonline.com/ArcGIS/rest/services/"
            "World_Imagery/MapServer/tile/{z}/{y}/{x}"
        ),
        attr="Esri, Maxar, Earthstar Geographics",
        name="Esri World Imagery",
        control=True,
        show=False,
    ).add_to(fmap)

    # Only polygons make sense as an AOI; disable every other draw tool.
    Draw(
        export=True,
        filename="aoi.geojson",
        position="topleft",
        draw_options={
            "polyline": False,
            "rectangle": False,
            "circle": False,
            "circlemarker": False,
            "marker": False,
            "polygon": True,
        },
        edit_options={
            "edit": True,
            "remove": True,
        },
    ).add_to(fmap)

    LocateControl(
        auto_start=True,
        position="topright",
        strings={"title": "My location"},
        flyTo=True,
        keepCurrentZoomLevel=False,
        drawCircle=True,
        drawMarker=True,
    ).add_to(fmap)

    folium.LayerControl().add_to(fmap)

    # Inject a small JS hook: when geolocation succeeds, drop the OSM
    # layer and show the satellite imagery instead.
    js_map = fmap.get_name()
    js_osm = base_osm.get_name()
    js_sat = base_sat.get_name()

    switch_script = f"""
    <script>
    {js_map}.on('locationfound', function(e) {{
        try {{
            {js_map}.removeLayer({js_osm});
        }} catch (err) {{
            console.log('OSM layer remove error:', err);
        }}
        {js_map}.addLayer({js_sat});
    }});
    </script>
    """
    fmap.get_root().html.add_child(folium.Element(switch_script))

    return fmap._repr_html_()
|
|
|
|
|
def geocode_place(query: str):
    """
    Resolve a free-text place name to ``(lat, lon)`` via the
    OpenStreetMap Nominatim search API.

    Raises:
        ValueError: when Nominatim returns no match.
        requests.HTTPError: on a non-2xx HTTP response.
    """
    resp = requests.get(
        "https://nominatim.openstreetmap.org/search",
        params={"q": query, "format": "json", "limit": 1},
        headers={"User-Agent": "giims-sm-sensor-app/1.0"},
        timeout=15,
    )
    resp.raise_for_status()

    results = resp.json()
    if not results:
        raise ValueError(f"No results for '{query}'.")

    top = results[0]
    return float(top["lat"]), float(top["lon"])
|
|
|
|
|
def update_drawer_map(search_query: str) -> str:
    """
    Gradio callback to refresh the AOI drawer map.

    Re-renders the drawer, optionally centred on a geocoded place name;
    falls back to the default view (prefixed with an inline error
    banner) when geocoding fails or the query is blank.
    """
    query = (search_query or "").strip()
    if not query:
        return make_drawer_map_html()

    try:
        lat, lon = geocode_place(query)
    except Exception as e:
        banner = (
            f"<div style='color:#b91c1c;font-size:13px;margin-bottom:4px;'>"
            f"Could not find '{search_query}': {e}</div>"
        )
        return banner + make_drawer_map_html()

    return make_drawer_map_html(center_lat=lat, center_lon=lon, zoom=13)
|
|
|
|
|
|
|
|
|
|
|
|
|
# Earth Engine service-account configuration. The key JSON itself is
# supplied via the EE_SERVICE_ACCOUNT_KEY env var (e.g. a Space secret)
# and is never committed to the repo.
SA_EMAIL = os.environ.get(
    "EE_SERVICE_ACCOUNT",
    "zolokiala@tethys-app-1.iam.gserviceaccount.com",
)
PROJECT_ID = os.environ.get("EE_PROJECT_ID", "tethys-app-1")
EE_KEY_JSON = os.environ.get("EE_SERVICE_ACCOUNT_KEY")
|
|
|
|
|
def init_earth_engine():
    """
    Authenticate and initialize the Earth Engine client using the
    service-account credentials held in EE_SERVICE_ACCOUNT_KEY.

    Raises:
        RuntimeError: if the EE_SERVICE_ACCOUNT_KEY env var is missing.
    """
    if EE_KEY_JSON is None:
        raise RuntimeError(
            "EE_SERVICE_ACCOUNT_KEY env var is not set.\n"
            "Add EE_SERVICE_ACCOUNT_KEY with the full service-account JSON."
        )

    # ee's credential helper wants a key file on disk, so persist the
    # JSON once per container lifetime.
    key_path = "/tmp/ee-service-account.json"
    if not os.path.exists(key_path):
        with open(key_path, "w") as f:
            f.write(EE_KEY_JSON)

    from ee import ServiceAccountCredentials

    credentials = ServiceAccountCredentials(SA_EMAIL, key_path)
    ee.Initialize(credentials, project=PROJECT_ID)
    # NOTE: replaced mojibake-garbled emoji status marker (which broke the
    # string literal in the original) with an encoding-safe ASCII tag.
    print(f"[OK] EE initialized: {SA_EMAIL} | project={PROJECT_ID}")
|
|
|
|
|
# Initialize EE at import time; the rest of the module is unusable without it.
init_earth_engine()
|
|
|
|
|
|
|
|
|
# --- Sentinel-1 matching / sampling parameters -------------------------
MAX_DAYS_DIFF = 6     # max age (days) of an S1 composite vs the target date
STEP_DAYS = 6         # S1 composite window length in days
AOI_BUFFER_M = 15000  # buffer (m) around the AOI when querying S1 scenes
SCALE = 20            # sampling scale (m) used in reduceRegion

# Copernicus GLO-30 DEM: elevation + slope used as static model predictors.
DEM_COLL = ee.ImageCollection("COPERNICUS/DEM/GLO30")
DEM = DEM_COLL.mosaic()
DEM_ELEV = DEM.select("DEM").rename("elev")
DEM_SLOPE = ee.Terrain.slope(DEM).rename("slope")

# None = accept both ASCENDING and DESCENDING passes.
S1_ORBIT_PASS = None
|
|
|
|
|
def get_s1_collection(aoi, orbit_pass=None):
    """
    Return the Sentinel-1 GRD collection over *aoi*, restricted to
    IW-mode scenes carrying both VV and VH polarisations, and optionally
    filtered to a single orbit pass ("ASCENDING"/"DESCENDING").
    """
    has_vv = ee.Filter.listContains("transmitterReceiverPolarisation", "VV")
    has_vh = ee.Filter.listContains("transmitterReceiverPolarisation", "VH")

    collection = (
        ee.ImageCollection("COPERNICUS/S1_GRD")
        .filterBounds(aoi)
        .filter(ee.Filter.eq("instrumentMode", "IW"))
        .filter(has_vv)
        .filter(has_vh)
    )

    if orbit_pass:
        collection = collection.filter(
            ee.Filter.eq("orbitProperties_pass", orbit_pass)
        )
    return collection
|
|
|
|
|
|
|
|
|
|
|
|
|
def make_s1_composites(s1_col, start_date, end_date, step_days=6):
    """
    Split [start_date, end_date) into windows of ``step_days`` days and
    build one median S1 composite (VV/VH/angle) per window.

    Windows with no imagery yield a fully-masked placeholder image and
    are dropped at the end via the ``n_images`` property, so the
    returned collection contains only non-empty composites. Each
    composite is timestamped at the window midpoint for the later
    temporal join.
    """
    start = ee.Date(start_date)
    end = ee.Date(end_date)
    # Number of step_days-long windows needed to cover the span.
    n = end.difference(start, "day").divide(step_days).ceil().int()

    # Placeholder image: correct band names, but every pixel masked out.
    empty = (
        ee.Image.constant([0, 0, 0])
        .rename(["VV", "VH", "angle"])
        .updateMask(ee.Image.constant(0))
    )

    def make_one(i):
        # Window i covers [start + i*step, start + (i+1)*step).
        i = ee.Number(i)
        d0 = start.advance(i.multiply(step_days), "day")
        d1 = d0.advance(step_days, "day")
        win = s1_col.filterDate(d0, d1)

        # Median composite when imagery exists, else the masked placeholder.
        comp = ee.Image(
            ee.Algorithms.If(
                win.size().gt(0),
                win.median().select(["VV", "VH", "angle"]),
                empty,
            )
        )

        # Timestamp at the middle of the window.
        mid = d0.advance(ee.Number(step_days).divide(2), "day")

        comp = comp.set(
            {
                "system:time_start": mid.millis(),
                "date": mid.format("YYYY-MM-dd"),
                "n_images": win.size(),
            }
        )
        return comp

    comps = ee.ImageCollection(
        ee.List.sequence(0, n.subtract(1)).map(make_one)
    )
    # Drop placeholder composites (windows with zero source images).
    comps = comps.filter(ee.Filter.gt("n_images", 0))
    return comps
|
|
|
|
|
|
|
|
|
|
|
|
|
def fc_to_pandas(fc, force_columns=None):
    """
    Download an ee.FeatureCollection client-side and flatten each
    feature's properties into one pandas DataFrame row.

    Args:
        fc: server-side collection (any object exposing ``.getInfo()``
            returning a GeoJSON-like dict with a 'features' list).
        force_columns: optional iterable of column names that must exist
            in the result; any missing one is added filled with NaN so
            downstream code can rely on a stable schema.

    Returns:
        pd.DataFrame: one row per feature.
    """
    d = fc.getInfo()
    rows = [f.get("properties", {}) for f in d.get("features", [])]
    df = pd.DataFrame(rows)
    print("Downloaded rows :", len(df))
    print("Downloaded columns:", df.columns.tolist())

    # NOTE: replaced mojibake-garbled emoji warning marker with an
    # encoding-safe ASCII tag.
    for c in force_columns or []:
        if c not in df.columns:
            df[c] = np.nan
            print(f"[WARN] Added missing column '{c}' with NaNs.")
    return df
|
|
|
|
|
|
|
|
|
|
|
|
|
def attach_s1_nearest_composite_past_mean_over_cell(fc_cells, s1_comps, max_days_diff=6):
    """
    Same join logic as before, BUT samples predictors as MEAN over the
    grid cell POLYGON (not at centroid point).

    For each cell, finds the single closest S1 composite that is at most
    ``max_days_diff`` days in the PAST of the cell's 'date' property
    (past-only join), then reduces the S1 bands plus DEM elevation/slope
    over the full cell polygon and stores the means as properties.
    """
    def add_t(f):
        # Millisecond timestamp of the cell's target date, used as the
        # left-hand field of the temporal join.
        return f.set("t", ee.Date(f.get("date")).millis())

    fc = fc_cells.map(add_t)

    max_diff_ms = max_days_diff * 24 * 60 * 60 * 1000

    # Composite must be within max_days_diff of the target date ...
    diff_filter = ee.Filter.maxDifference(
        difference=max_diff_ms,
        leftField="t",
        rightField="system:time_start",
    )

    # ... and must not be later than the target date (past-only).
    past_filter = ee.Filter.greaterThanOrEquals(
        leftField="t", rightField="system:time_start"
    )

    filt = ee.Filter.And(diff_filter, past_filter)

    # saveBest keeps only the single closest matching composite per cell.
    join = ee.Join.saveBest(matchKey="best_img", measureKey="time_diff")
    joined = ee.FeatureCollection(join.apply(fc, s1_comps, filt))

    matched = joined.filter(ee.Filter.notNull(["best_img"]))
    unmatched = joined.size().subtract(matched.size())
    print(
        "π Join matched (server-side):",
        matched.size().getInfo(),
        "/",
        joined.size().getInfo(),
    )
    print(" Unmatched:", unmatched.getInfo())

    def sample_one(feat):
        # Mean of S1 bands + DEM layers over the whole cell polygon.
        img = ee.Image(feat.get("best_img"))
        full_img = img.addBands(DEM_ELEV).addBands(DEM_SLOPE)

        vals = full_img.reduceRegion(
            reducer=ee.Reducer.mean(),
            geometry=feat.geometry(),
            scale=SCALE,
            maxPixels=1e7,
            bestEffort=True,
        )

        return feat.set(
            {
                "VV": vals.get("VV"),
                "VH": vals.get("VH"),
                "angle": vals.get("angle"),
                "elev": vals.get("elev"),
                "slope": vals.get("slope"),
                "comp_date": img.get("date"),
                "time_diff_ms": feat.get("time_diff"),
                "n_images": img.get("n_images"),
            }
        )

    sampled = matched.map(sample_one)
    got_vv = sampled.filter(ee.Filter.notNull(["VV"])).size()
    tot = sampled.size()
    print(
        "π§ͺ Sampled non-null VV (server-side):",
        got_vv.getInfo(),
        "/",
        tot.getInfo(),
    )
    return sampled
|
|
|
|
|
|
|
|
|
|
|
|
|
def build_plot_grid_centroids(date_str, plot_geojson_path, cell_size_m):
    """
    KEEPING YOUR FUNCTION NAME (so the rest of your code stays the same),
    but now it builds GRID CELL POLYGONS and returns them as an EE
    FeatureCollection, with 'lon'/'lat' stored as the polygon centroid
    for display.

    Soil moisture will be estimated from MEAN predictors over each grid cell.

    Args:
        date_str: target date (YYYY-MM-DD) copied onto every cell.
        plot_geojson_path: path to the field polygon GeoJSON (EPSG:4326).
        cell_size_m: grid cell edge length in metres.

    Returns:
        tuple: (fc_cells, geom) — ee.FeatureCollection of clipped grid
        cell polygons, and the dissolved AOI as an ee.Geometry.
    """
    plot_geojson_path = Path(plot_geojson_path)
    if not plot_geojson_path.exists():
        raise FileNotFoundError(
            f"Plot GeoJSON not found at {plot_geojson_path}."
        )

    print(f"[READ] {plot_geojson_path}")
    aoi = gpd.read_file(plot_geojson_path)

    if aoi.empty:
        raise RuntimeError("AOI file has no features.")

    # Merge all input features into a single AOI geometry.
    aoi = aoi.dissolve().reset_index(drop=True)

    # Work in a local metric CRS so cell_size_m is meaningful.
    print("[CRS] Estimating local UTM CRS...")
    utm_crs = aoi.estimate_utm_crs()
    aoi_utm = aoi.to_crs(utm_crs)

    minx, miny, maxx, maxy = aoi_utm.total_bounds
    print(f"[BOUNDS] {minx:.2f}, {miny:.2f}, {maxx:.2f}, {maxy:.2f}")

    n_cols = math.ceil((maxx - minx) / cell_size_m)
    n_rows = math.ceil((maxy - miny) / cell_size_m)
    print(f"[GRID] rows={n_rows} cols={n_cols} cell_size={cell_size_m} m")

    # Regular grid of square cells covering the AOI bounding box.
    grid_polys = []
    cell_ids = []
    for i in range(n_cols):
        x0 = minx + i * cell_size_m
        x1 = x0 + cell_size_m
        for j in range(n_rows):
            y0 = miny + j * cell_size_m
            y1 = y0 + cell_size_m
            grid_polys.append(box(x0, y0, x1, y1))
            cell_ids.append(f"{i:04d}_{j:04d}")

    grid = gpd.GeoDataFrame({"cell_id": cell_ids, "geometry": grid_polys}, crs=utm_crs)

    # Keep only the parts of each cell that intersect the field polygon.
    print("[CLIP] Clipping grid to AOI...")
    grid_clip = gpd.overlay(grid, aoi_utm, how="intersection")

    if grid_clip.empty:
        raise RuntimeError(
            f"Clipped grid is empty for cell_size_m={cell_size_m}. "
            "Try a larger cell size or check your AOI geometry."
        )

    # Back to geographic coordinates for Earth Engine.
    print("[CRS] Reprojecting AOI & grid to EPSG:4326 ...")
    aoi_4326 = aoi_utm.to_crs(epsg=4326)
    grid_clip_4326 = grid_clip.to_crs(epsg=4326)

    aoi_union = aoi_4326.geometry.unary_union
    aoi_geojson = aoi_union.__geo_interface__
    geom = ee.Geometry(aoi_geojson)

    # Convert each clipped cell to an ee.Feature; centroid lon/lat are
    # stored as properties purely for display/tabulation downstream.
    features = []
    for _, row in grid_clip_4326.iterrows():
        poly = row.geometry
        if poly is None or poly.is_empty:
            continue

        c = poly.centroid
        lon = float(c.x)
        lat = float(c.y)

        feat = ee.Feature(
            ee.Geometry(poly.__geo_interface__),
            {
                "cell_id": str(row.get("cell_id", "")),
                "lon": lon,
                "lat": lat,
                "date": date_str,
                "Sheet": "plot_grid",
            },
        )
        features.append(feat)

    fc_cells = ee.FeatureCollection(features)
    print(f"[EE] Built {len(features)} grid CELLS in EE FeatureCollection.")
    return fc_cells, geom
|
|
|
|
|
|
|
|
|
|
|
|
|
def predict_sm_on_grid(date_target, plot_geojson_path, cell_size_m):
    """
    Predict soil moisture on a regular grid over the field polygon.

    Builds grid-cell polygons, attaches the nearest past Sentinel-1
    composite (mean predictors over each cell), runs the ExtraTrees
    model, reports the spatial coefficient of variation (CV%), and
    writes a per-cell CSV.

    Args:
        date_target: target date string (YYYY-MM-DD).
        plot_geojson_path: path to the field polygon GeoJSON.
        cell_size_m: grid cell edge length in metres.

    Returns:
        tuple: (cv_pct, out_df, geom) — CV of predicted SM in percent,
        per-cell output DataFrame, and the AOI ee.Geometry.

    Raises:
        RuntimeError: when no grid cells, S1 images, composites, or
        joined samples are available for this configuration.
    """
    fc_pts, geom = build_plot_grid_centroids(
        date_target, plot_geojson_path, cell_size_m
    )
    n_pts = fc_pts.size().getInfo()
    # NOTE: replaced mojibake-garbled emoji log markers (which broke the
    # string literals in the original) with encoding-safe ASCII tags.
    print(f"[OK] Grid cells inside plot (cell size {cell_size_m} m): {n_pts}")
    if n_pts == 0:
        raise RuntimeError(
            f"No grid cells inside plot for cell_size_m={cell_size_m}.\n"
            "Check GeoJSON coordinates and/or reduce cell_size_m."
        )

    # Buffer the AOI so scene footprints that only partially overlap the
    # field are still picked up.
    aoi = geom.buffer(AOI_BUFFER_M)
    s1 = get_s1_collection(aoi, S1_ORBIT_PASS)

    start_wide = (
        ee.Date(date_target)
        .advance(-MAX_DAYS_DIFF, "day")
        .format("YYYY-MM-dd")
        .getInfo()
    )
    end_wide = ee.Date(date_target).format("YYYY-MM-dd").getInfo()
    print("[S1] Wide S1 date range (map):", start_wide, "to", end_wide)

    s1_period = s1.filterDate(start_wide, end_wide)
    n_s1 = s1_period.size().getInfo()
    print("[S1] S1 images in WIDE range (map):", n_s1)
    if n_s1 == 0:
        raise RuntimeError(
            f"No S1 images in map period for this AOI (cell_size_m={cell_size_m}). "
            "Try another date or expand range."
        )

    comps = make_s1_composites(s1_period, start_wide, end_wide, STEP_DAYS)
    n_comps = comps.size().getInfo()
    print("[S1] Composites kept (non-empty, map):", n_comps)
    if n_comps == 0:
        raise RuntimeError(
            f"No non-empty composites for map inference (cell_size_m={cell_size_m}). "
            "Try a larger STEP_DAYS or date window."
        )

    # Past-only temporal join + mean-over-cell sampling.
    fc_pts_s1 = attach_s1_nearest_composite_past_mean_over_cell(
        fc_pts, comps, MAX_DAYS_DIFF
    )
    n_pts_s1 = fc_pts_s1.size().getInfo()
    print(f"[OK] Grid cells with S1 match: {n_pts_s1} / {n_pts}")
    if n_pts_s1 == 0:
        raise RuntimeError(
            "No grid cells could be matched to a Sentinel-1 composite in the past-only join."
        )

    df = fc_to_pandas(
        fc_pts_s1,
        force_columns=["VV", "VH", "angle", "elev", "slope", "lon", "lat"],
    )

    if len(df) == 0:
        raise RuntimeError("Joined dataframe is empty (no rows).")

    for col in ["VV", "VH", "angle"]:
        df[col] = pd.to_numeric(df[col], errors="coerce")

    # Derived backscatter features matching those used during training.
    df["VV_VH_ratio"] = df["VV"] / df["VH"]
    df["VV_minus_VH"] = df["VV"] - df["VH"]
    df["VV_plus_VH"] = df["VV"] + df["VH"]
    df["VV_dB"] = 10.0 * np.log10(df["VV"] + 1e-6)
    df["VH_dB"] = 10.0 * np.log10(df["VH"] + 1e-6)

    if "time_diff_ms" in df.columns:
        df["time_diff_days"] = pd.to_numeric(
            df["time_diff_ms"], errors="coerce"
        ) / (1000.0 * 60.0 * 60.0 * 24.0)
    if "n_images" in df.columns:
        df["n_images"] = pd.to_numeric(df["n_images"], errors="coerce")

    for col in ["elev", "slope"]:
        if col in df.columns:
            df[col] = pd.to_numeric(df[col], errors="coerce")

    model = MODEL
    feature_cols = FEATURE_COLS

    # Guarantee every model feature exists and is numeric; impute NaNs
    # with the per-column median so prediction never fails on data gaps.
    for col in feature_cols:
        if col not in df.columns:
            df[col] = np.nan
            print(
                f"[WARN] Added missing feature column '{col}' with NaNs for map inference."
            )
        df[col] = pd.to_numeric(df[col], errors="coerce")
        med = df[col].median()
        df[col] = df[col].fillna(med)

    X = df[feature_cols].values
    if X.shape[0] == 0:
        raise RuntimeError("No samples available for prediction (X has 0 rows).")

    df["sm_pred"] = model.predict(X)

    # Spatial uniformity statistics over the predicted SM field.
    mean_sm = df["sm_pred"].mean()
    std_sm = df["sm_pred"].std(ddof=1)
    cv_pct = (std_sm / mean_sm) * 100 if mean_sm != 0 else np.nan

    print("\n=== SOIL MOISTURE UNIFORMITY (GRID CELLS) ===")
    print(" (SM predicted from MEAN predictors over each grid cell polygon)")
    print(f"Date : {date_target}")
    print(f"Cell size : {cell_size_m} m")
    print(f"Mean SM : {mean_sm:.2f}")
    print(f"Std SM : {std_sm:.2f}")
    print(f"CV (percent): {cv_pct:.1f}%")
    print(f"N cells : {len(df)}")

    # Persist a per-cell CSV for offline analysis.
    map_csv = f"sm_map_{date_target}_grid_{cell_size_m}m.csv"
    keep_cols = []
    for col in [
        "date",
        "lat",
        "lon",
        "elev",
        "slope",
        "VV",
        "VH",
        "angle",
        "sm_pred",
        "comp_date",
        "time_diff_days",
        "n_images",
    ]:
        if col in df.columns and col not in keep_cols:
            keep_cols.append(col)

    out = df[keep_cols].copy()
    out.to_csv(map_csv, index=False)
    print("[SAVE] Saved grid-cell map CSV:", map_csv)
    print(" Rows (grid cells):", len(out))

    return cv_pct, out, geom
|
|
|
|
|
|
|
|
|
|
|
|
|
def run_sensor_optimization(date_target, geojson_file, cell_sizes_str):
    """
    Gradio callback: sweep a list of grid cell sizes, compute the CV of
    predicted soil moisture for each, and pick the "optimal" grid — the
    fewest sensors whose CV is within CV_TOLERANCE of the minimum.

    Args:
        date_target: target date string (YYYY-MM-DD).
        geojson_file: filepath of the uploaded AOI GeoJSON (or None).
        cell_sizes_str: comma-separated list of cell sizes in metres.

    Returns:
        tuple: (matplotlib Figure of CV vs N sensors, summary DataFrame).

    Raises:
        gr.Error: on a missing AOI, unparseable sizes, or when every
        grid size fails.
    """
    if geojson_file is None:
        msg = (
            "<b>Provide a field AOI.</b> Upload a Polygon/MultiPolygon GeoJSON (EPSG:4326), "
            "or use the AOI drawer to draw, export & upload."
        )
        raise gr.Error(msg)

    plot_geojson_path = str(geojson_file)

    try:
        cell_sizes = [int(s.strip()) for s in cell_sizes_str.split(",") if s.strip()]
    except Exception:
        raise gr.Error(
            "Could not parse grid sizes. Use a comma-separated list, e.g. '5,10,20,30'."
        )

    cvs = []
    n_sensors = []
    used_cell_sizes = []

    # Evaluate each grid size independently; individual failures are
    # logged and skipped so one bad configuration doesn't sink the run.
    # NOTE: replaced mojibake-garbled emoji log markers with ASCII tags.
    for cell_size in cell_sizes:
        print("\n" + "=" * 60)
        print(f"[RUN] Running grid size {cell_size} m ...")
        try:
            cv_pct, df_grid, _geom = predict_sm_on_grid(
                date_target, plot_geojson_path, cell_size
            )
            cvs.append(cv_pct)
            n_sensors.append(len(df_grid))
            used_cell_sizes.append(cell_size)
        except Exception as e:
            print(f"[WARN] Skipping cell_size={cell_size} due to error: {e}")

    if len(cvs) == 0:
        raise gr.Error(
            "All grid sizes failed. Check date, GeoJSON, or model availability."
        )

    summary_df = (
        pd.DataFrame(
            {
                "cell_size_m": used_cell_sizes,
                "n_sensors": n_sensors,
                "cv_percent": cvs,
            }
        )
        .sort_values("n_sensors")
        .reset_index(drop=True)
    )

    # Grids whose CV is within this many percentage points of the best
    # are considered equivalent; among those we prefer fewer sensors.
    CV_TOLERANCE = 2.0

    min_cv = float(summary_df["cv_percent"].min())
    candidates = summary_df[summary_df["cv_percent"] <= min_cv + CV_TOLERANCE].copy()

    best_row = candidates.sort_values("n_sensors").iloc[0]

    opt_n = int(best_row["n_sensors"])
    opt_cv = float(best_row["cv_percent"])
    opt_cell = int(best_row["cell_size_m"])

    print("\n[OPTIMAL GRID SELECTION]")
    print(f" Min CV overall : {min_cv:.3f} %")
    print(f" CV tolerance : +/-{CV_TOLERANCE:.3f} %")
    print(" Candidate grids (within tolerance):")
    print(candidates)
    print(
        f" -> Chosen grid: cell_size={opt_cell} m, "
        f"n_sensors={opt_n}, cv={opt_cv:.3f} %"
    )

    # Plot CV vs number of sensors, with the chosen optimum highlighted.
    fig, ax = plt.subplots(figsize=(6, 4))
    ax.plot(summary_df["n_sensors"], summary_df["cv_percent"], marker="o")
    ax.set_xlabel("Number of sensors (N grid cells)")
    ax.set_ylabel("CV of soil moisture (%)")
    ax.set_title(f"CV vs Number of Sensors - {date_target}")
    ax.grid(True, alpha=0.3)

    ax.scatter([opt_n], [opt_cv], s=120, marker="*", edgecolor="black")
    ax.annotate(
        f"Optimal\nN={opt_n}\nCV={opt_cv:.1f}%",
        xy=(opt_n, opt_cv),
        xytext=(5, 5),
        textcoords="offset points",
        fontsize=9,
        bbox=dict(boxstyle="round,pad=0.3", fc="white", alpha=0.8),
    )

    ax.text(
        0.99,
        0.01,
        f"Optimal grid ~ {opt_cell} m",
        transform=ax.transAxes,
        ha="right",
        va="bottom",
        fontsize=8,
        bbox=dict(boxstyle="round,pad=0.3", fc="white", alpha=0.6),
    )

    plt.tight_layout()

    return fig, summary_df
|
|
|
|
|
|
|
|
|
|
|
|
|
def show_centroid_map(date_target, geojson_file, cell_size_m):
    """
    Build grid for a single cell size, run the model (MEAN-over-cell),
    and render:
      - SM basemap (colored rectangles; uses centroid lon/lat for display)
      - Red sensor locations on top
      - Table of coordinates + predicted SM

    Returns:
        tuple: (map_html, df_coords) — folium map HTML (or an inline
        error message) and the sensor-coordinate DataFrame (empty on
        failure).
    """
    empty = pd.DataFrame(
        columns=["sensor_id", "Longitude (°E)", "Latitude (°S)", "sm_pred"]
    )

    if geojson_file is None:
        msg = (
            "<i>Please upload a field GeoJSON first, then click "
            "<b>Show centroid map</b>.</i>"
        )
        return msg, empty

    plot_geojson_path = str(geojson_file)

    try:
        cell_size_m = int(cell_size_m)
    except Exception:
        msg = "<i>Cell size must be a single integer (e.g. 10, 20, 30).</i>"
        return msg, empty

    # Run the full grid + prediction pipeline for this single cell size.
    try:
        cv_pct, df_sm, geom = predict_sm_on_grid(
            date_target, plot_geojson_path, cell_size_m
        )
    except Exception as e:
        msg = (
            f"<i>Could not build SM map for this configuration: {e}</i>"
        )
        return msg, empty

    n_pts = len(df_sm)
    if n_pts == 0:
        msg = (
            f"<i>No grid cells inside the plot for cell_size_m={cell_size_m} m. "
            "Try a smaller cell size or check your GeoJSON.</i>"
        )
        return msg, empty

    # NOTE: replaced mojibake-garbled emoji log markers with ASCII tags.
    print(f"[MAP] Preview map: {n_pts} grid cells for cell size {cell_size_m} m")

    # Centre the map on the AOI centroid.
    centroid = geom.centroid().coordinates().getInfo()
    lon_c, lat_c = centroid[0], centroid[1]

    m = folium.Map(
        location=[lat_c, lon_c],
        zoom_start=16,
        tiles=None,
        control_scale=True,
    )
    folium.TileLayer(
        tiles="https://services.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}",
        attr="Esri, Maxar, Earthstar Geographics",
        name="Esri World Imagery",
        show=True,
    ).add_to(m)
    folium.TileLayer("OpenStreetMap", name="OpenStreetMap", show=False).add_to(m)

    # Field outline (best-effort; the map is still usable without it).
    try:
        with open(plot_geojson_path, "r") as f:
            gj = json.load(f)
        folium.GeoJson(
            gj,
            name="Field polygon",
            style_function=lambda x: {
                "color": "#10b981",
                "weight": 2,
                "fillOpacity": 0.05,
            },
        ).add_to(m)
    except Exception as e:
        print("[WARN] Could not add field polygon to map:", e)

    df_sm = df_sm.copy()
    df_sm["lon"] = pd.to_numeric(df_sm["lon"], errors="coerce")
    df_sm["lat"] = pd.to_numeric(df_sm["lat"], errors="coerce")
    df_sm["sm_pred"] = pd.to_numeric(df_sm["sm_pred"], errors="coerce")

    sm_min = float(df_sm["sm_pred"].min())
    sm_max = float(df_sm["sm_pred"].max())
    if sm_min == sm_max:
        # Avoid a degenerate colormap when all predictions are equal.
        sm_min -= 0.5
        sm_max += 0.5

    colormap = linear.viridis.scale(sm_min, sm_max)
    colormap.caption = "Predicted soil moisture (%)"
    colormap.add_to(m)

    # Each grid cell is drawn as a lat/lon-aligned rectangle centred on
    # its centroid. The latitude extent in degrees is loop-invariant
    # (hoisted out of the loop); the longitude extent depends on lat.
    cell_deg_lat = cell_size_m / 111_320.0
    half_lat = cell_deg_lat / 2.0

    rect_group = folium.FeatureGroup(name="SM basemap")
    for _, row in df_sm.iterrows():
        lat = row["lat"]
        lon = row["lon"]
        sm = row["sm_pred"]

        if np.isnan(lat) or np.isnan(lon) or np.isnan(sm):
            continue

        lat_rad = math.radians(lat)
        cell_deg_lon = (cell_size_m / 111_320.0) / max(math.cos(lat_rad), 1e-6)
        half_lon = cell_deg_lon / 2.0

        bounds = [
            [lat - half_lat, lon - half_lon],
            [lat + half_lat, lon + half_lon],
        ]

        folium.Rectangle(
            bounds=bounds,
            fill=True,
            fill_color=colormap(sm),
            fill_opacity=0.8,
            stroke=False,
        ).add_to(rect_group)

    rect_group.add_to(m)

    # Coordinate table shown next to the map.
    df_coords = df_sm[["lon", "lat", "sm_pred"]].copy()
    df_coords["lon"] = df_coords["lon"].round(6)
    df_coords["lat"] = df_coords["lat"].round(6)
    df_coords.insert(0, "sensor_id", np.arange(1, len(df_coords) + 1))
    df_coords.rename(
        columns={"lon": "Longitude (°E)", "lat": "Latitude (°S)"}, inplace=True
    )

    points_group = folium.FeatureGroup(name=f"Centroids ({n_pts} sensors)")
    for _, row in df_coords.iterrows():
        folium.CircleMarker(
            location=[row["Latitude (°S)"], row["Longitude (°E)"]],
            radius=5,
            color="#ef4444",
            weight=1,
            fill=True,
            fill_color="#ef4444",
            fill_opacity=0.95,
            popup=(
                f"id={int(row['sensor_id'])}<br>"
                f"SM={row['sm_pred']:.2f} %<br>"
                f"lon={row['Longitude (°E)']}, lat={row['Latitude (°S)']}"
            ),
        ).add_to(points_group)

    points_group.add_to(m)

    legend_html = """
    <div style="
        position: fixed;
        bottom: 20px;
        left: 20px;
        z-index: 9999;
        background: rgba(15,23,42,0.85);
        color: #f9fafb;
        padding: 8px 12px;
        border-radius: 8px;
        font-size: 12px;
        box-shadow: 0 2px 6px rgba(0,0,0,0.3);
    ">
    <b>Map features</b><br>
    <span style="display:inline-block;width:10px;height:10px;
        border-radius:50%;background:#ef4444;margin-right:4px;"></span>
    Soil moisture sensors (grid cell centroids)
    </div>
    """
    m.get_root().html.add_child(folium.Element(legend_html))

    folium.LayerControl().add_to(m)

    map_html = m._repr_html_()
    return map_html, df_coords
|
|
|
|
|
|
|
|
|
|
|
|
|
def load_example_aoi():
    """
    Return the bundled example AOI path for the GeoJSON File input.

    Raises:
        gr.Error: when the example file is missing from the repo.
    """
    example = Path(EXAMPLE_AOI_PATH)
    if not example.exists():
        raise gr.Error(
            f"Example AOI not found at '{EXAMPLE_AOI_PATH}'. "
            "Make sure the file exists in your repo."
        )
    return EXAMPLE_AOI_PATH
|
|
|
|
|
|
|
|
|
|
|
|
|
# Shared Gradio theme for the whole app.
theme = gr.themes.Soft(
    primary_hue="teal", secondary_hue="cyan", neutral_hue="slate"
)
|
|
|
# ---------------------------------------------------------------------
# Gradio UI: header, inputs (left column), results tabs (right column),
# and the event wiring that connects the buttons to callbacks.
# ---------------------------------------------------------------------
with gr.Blocks(
    theme=theme,
    css="""
    .gradio-container {
        max-width: 1080px !important;
        margin: 0 auto !important;
    }
    #sm-header h1 {
        text-align: center;
    }
    #sm-header p {
        text-align: center;
        font-size: 0.95rem;
    }
    .small-note {
        font-size: 0.78rem;
        opacity: 0.8;
    }
    """,
) as demo:

    # App title and one-line usage summary.
    with gr.Column(elem_id="sm-header"):
        gr.Markdown(
            """
            # π± Soil Moisture Sensor Optimization
            **Sentinel-1 + ExtraTrees β Field-scale sensor planning**

            Upload or draw a field polygon, explore different grid sizes, and find the number of
            soil moisture sensors that minimises spatial variability (CV%).
            """
        )

    with gr.Row():

        # ---- Left column: user inputs ---------------------------------
        with gr.Column(scale=1):
            gr.Markdown("### π₯ Inputs")

            date_input = gr.Textbox(
                label="Target date (YYYY-MM-DD)",
                value="2025-10-17",
                info=(
                    "Date of interest for soil moisture mapping "
                    "(must overlap Sentinel-1 coverage)."
                ),
                placeholder="e.g. 2025-10-17",
            )

            cell_sizes_input = gr.Textbox(
                label="Grid cell sizes for optimization (m, comma-separated)",
                value="5,10,20,30",
                info="Each value defines a regular grid (cell size in metres) over your field.",
                placeholder="5,10,20,30",
            )

            geojson_input = gr.File(
                label="Field polygon (GeoJSON; EPSG:4326, Polygon/MultiPolygon)",
                file_types=[".geojson"],
                file_count="single",
                type="filepath",
            )

            example_button = gr.Button(
                "π Load example AOI",
                variant="secondary",
            )

            # Collapsible AOI drawer: search a place, draw a polygon,
            # export it as GeoJSON, then upload it via the File input above.
            with gr.Accordion(
                "Draw / Search AOI (folium Draw) β export & upload here", open=False
            ):
                search_box = gr.Textbox(
                    label="Search place (optional)",
                    placeholder="e.g. Groblersdal, South Africa",
                    info=(
                        "Type a place name and click 'Search & update AOI map' "
                        "to centre the AOI drawer."
                    ),
                )
                search_button = gr.Button("π Search & update AOI map")

                drawer_map_html = gr.HTML(
                    value=make_drawer_map_html(), label="AOI drawer map"
                )

                gr.Markdown(
                    """
                    <div class="small-note">
                    1. Use the search box above or just pan/zoom on the map.<br>
                    2. Let the geolocation button find you, or navigate manually.<br>
                    3. Draw a polygon with the draw tools (top-left).<br>
                    4. Use the <b>Export</b> button in the draw toolbar to download <code>aoi.geojson</code>.<br>
                    5. Upload that file in the <b>Field polygon</b> input above β or click <b>Load example AOI</b>.
                    </div>
                    """,
                    elem_classes=["small-note"],
                )

            run_button = gr.Button("βΆ Run sensor optimization", variant="primary")

            gr.Markdown(
                """
                <div class="small-note">
                π‘ <b>Quick start:</b> Click <b>Load example AOI</b> β run optimization.
                Or: Search/draw your own field β export GeoJSON β upload it β run optimization.
                </div>
                """,
                elem_classes=["small-note"],
            )

        # ---- Right column: results tabs -------------------------------
        with gr.Column(scale=1.2):
            with gr.Tabs():
                # Tab 1: CV-vs-sensors optimization results.
                with gr.Tab("Optimization"):
                    gr.Markdown("### π CV vs Number of Sensors")

                    plot_output = gr.Plot(label="CV vs Number of Sensors")

                    table_output = gr.Dataframe(
                        label="Summary by grid size",
                        headers=["cell_size_m", "n_sensors", "cv_percent"],
                        interactive=False,
                    )

                    gr.Markdown(
                        """
                        <div class="small-note">
                        The optimal configuration is marked with a star β on the graph, and corresponds to the
                        lowest coefficient of variation (CV%) in predicted soil moisture, subject to the rule
                        that if CVs are similar we prefer fewer sensors.
                        </div>
                        """,
                        elem_classes=["small-note"],
                    )

                # Tab 2: single-grid SM map + sensor coordinate table.
                with gr.Tab("Sensor layout preview"):
                    gr.Markdown("### πΊοΈ SM basemap and sensor locations")

                    map_cell_size_input = gr.Dropdown(
                        label="Grid cell size for map (m)",
                        choices=[5, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100],
                        value=10,
                        interactive=True,
                        info="Choose one grid size to preview SM map and centroid locations.",
                    )

                    map_button = gr.Button(
                        "Show centroid map", variant="secondary"
                    )

                    map_html_output = gr.HTML(
                        label="Field SM map and sensor centroids"
                    )

                    centroid_table_output = gr.Dataframe(
                        label=(
                            "Centroid coordinates "
                            "(sensor_id, Longitude (Β°E), Latitude (Β°S), sm_pred)"
                        ),
                        interactive=False,
                    )

                    gr.Markdown(
                        """
                        <div class="small-note">
                        The coloured grid shows predicted soil moisture (%) from the model.
                        Red points (layers added on top) mark sensor locations with their coordinates and SM values.
                        </div>
                        """,
                        elem_classes=["small-note"],
                    )

    # Footer note.
    gr.Markdown(
        """
        ---
        <div class="small-note">
        Prototype developed around GIIMS soil-moisture workflow. Exported CSVs (per grid size)
        can be used for further analysis or for designing field experiments.
        </div>
        """,
        elem_classes=["small-note"],
    )

    # ---- Event wiring --------------------------------------------------
    run_button.click(
        fn=run_sensor_optimization,
        inputs=[date_input, geojson_input, cell_sizes_input],
        outputs=[plot_output, table_output],
    )

    map_button.click(
        fn=show_centroid_map,
        inputs=[date_input, geojson_input, map_cell_size_input],
        outputs=[map_html_output, centroid_table_output],
    )

    search_button.click(
        fn=update_drawer_map,
        inputs=[search_box],
        outputs=[drawer_map_html],
    )

    example_button.click(
        fn=load_example_aoi,
        inputs=None,
        outputs=[geojson_input],
    )
|
|
|
if __name__ == "__main__":
    # Local entry point; on hosted platforms the module import itself
    # builds `demo` and the host launches it.
    demo.launch()
|
|
|