# NBM Point Forecast — Gradio app (NOAA NOMADS feeds + NBM Viewer CSV emulation)
| import os | |
| import re | |
| import time | |
| import logging | |
| import pandas as pd | |
| from datetime import timezone | |
| import gradio as gr | |
| from nbm_client import ( | |
| fetch_point_forecast_df, | |
| fetch_point_probabilities, | |
| get_latest_hourly_dataset_url, | |
| get_latest_3hr_dataset_url, | |
| fetch_cloud_layers, | |
| fetch_precip_type_probs, | |
| fetch_snow_level_kft, | |
| fetch_point_forecast_df_blended, | |
| ) | |
| from plot_utils import ( | |
| make_temp_dew_wind_fig, | |
| make_cloud_precip_fig, | |
| make_snow_prob_fig, | |
| make_snow_6h_accum_fig, | |
| make_window_snow_fig, | |
| make_cloud_layers_fig, | |
| make_precip_type_fig, | |
| make_snow_level_fig, | |
| make_wind_rose_fig, | |
| make_wind_rose_grid, | |
| ) | |
| from nbm_viewer_client import ( | |
| list_years, | |
| list_months, | |
| list_days, | |
| list_versions, | |
| list_hours, | |
| list_locations, | |
| fetch_location_csv, | |
| ) | |
| from nbm_viewer_emulation import ( | |
| make_temp_maxmin_percentile_figure, | |
| prob_exceed_series, | |
| make_prob_exceed_figure, | |
| ) | |
# Usage blurb rendered under the page title; all times are reported in UTC.
INTRO = (
    "Click anywhere on the map to fetch a National Blend of Models "
    "(NBM) hourly point forecast (next 24 hours) from NOAA NOMADS. "
    "Times are UTC."
)
def run_forecast(lat, lon, hours=24):
    """Generator to provide live status updates to the UI and console logs.

    Yields 12-tuples matching the outputs wired to this callback:
    (status text update, forecast table DataFrame, and ten plot figures).
    Intermediate yields report progress; the final yield carries the
    finished table and charts.

    Args:
        lat: Latitude in degrees (float()-convertible, or None).
        lon: Longitude in degrees (float()-convertible, or None).
        hours: Forecast horizon. <= 36 uses the 1-hr NBM feed with a 3-hr
            fallback; longer horizons use a blended 1-hr + 3-hr timeline.
    """
    logging.basicConfig(level=logging.INFO)
    t0 = time.perf_counter()
    # Placeholders for progressive updates
    # (captured by the y() closure below so every progress yield re-sends
    # whatever figures have been produced so far).
    table_df = None
    temp_wind_fig = None
    cloud_precip_fig = None
    snow_prob_fig = None
    cloud_layers_fig = None
    precip_type_fig = None
    snow_level_fig = None
    wind_rose_fig = None

    def y(msg):
        # Print to console and build a 12-tuple progress update.
        # Positions 6-8 (the 6/24/48-hr snow figures) are always None here;
        # they are only populated in the final yield at the end.
        print(msg, flush=True)
        elapsed = time.perf_counter() - t0
        return (
            gr.update(value=f"{msg} (elapsed {elapsed:.1f}s)"),
            table_df,
            temp_wind_fig,
            cloud_precip_fig,
            snow_prob_fig,
            None,
            None,
            None,
            cloud_layers_fig,
            precip_type_fig,
            snow_level_fig,
            wind_rose_fig,
        )

    # Validate inputs before any network work.
    if lat is None or lon is None:
        yield y("Click map or enter lat/lon.")
        return
    try:
        lat = float(lat)
        lon = float(lon)
    except Exception:
        yield y("Invalid lat/lon.")
        return
    yield y(f"Starting forecast for lat={lat:.5f}, lon={lon:.5f}; hours={hours}")
    # Select (or build) the source dataset for the requested horizon.
    try:
        dataset_url = None
        if hours <= 36:
            yield y("Discovering latest NBM 1-hr dataset on NOMADS ...")
            try:
                dataset_url = get_latest_hourly_dataset_url()
            except Exception as e1:
                print(f"1-hr discovery failed: {e1}. Falling back to 3-hr.")
            if dataset_url is None:
                yield y("Discovering latest NBM 3-hr dataset on NOMADS ...")
                dataset_url = get_latest_3hr_dataset_url()
            yield y(f"Dataset selected: {dataset_url}")
        else:
            # For long horizons, build a blended time series: 1-hr first 36h + 3-hr remainder
            yield y("Building blended 1-hr + 3-hr timeline for long range ...")
            df, meta, blended_label = fetch_point_forecast_df_blended(lat, lon, hours)
            table_df = df
            # Produce charts below using df; set a label to reflect blended sources
            dataset_url = blended_label
            yield y(f"Dataset selected: {dataset_url}")
    except Exception as e:
        yield y(f"Failed to locate latest NBM dataset: {e}")
        return
    # Short horizons fetch df/meta here; long horizons already have them
    # from the blended call above.
    if hours <= 36:
        try:
            yield y("Opening dataset and indexing nearest grid point ...")
            df, meta = fetch_point_forecast_df(dataset_url, lat, lon, hours=hours)
        except Exception as e:
            yield y(f"Error fetching forecast at {lat:.3f}, {lon:.3f}: {e}\nDataset: {dataset_url}")
            return
    # NOTE(review): this header is never used before being recomputed just
    # ahead of the final yield below — effectively a dead assignment.
    header = (
        f"NBM hourly forecast (next {len(df)} hrs) at "
        f"{meta['lat']:.3f}, {meta['lon']:.3f} (grid: lat[{meta['ilat']}], lon[{meta['ilon']}])\n"
        f"Dataset: {dataset_url} | total time {time.perf_counter()-t0:.1f}s"
    )
    # Build charts
    try:
        temp_wind_fig = make_temp_dew_wind_fig(df)
        cloud_precip_fig = make_cloud_precip_fig(df)
    except Exception as e:
        print(f"Plot error: {e}")
        temp_wind_fig = None
        cloud_precip_fig = None
    # Try to fetch snow-related probabilities akin to NBM Viewer
    try:
        # Snow PoE is generally only available on 1-hr feeds; for blended runs, fetch from 1-hr
        if hours <= 36:
            src_url = dataset_url
        else:
            src_url = get_latest_hourly_dataset_url()
        t_idx, prob_map = fetch_point_probabilities(src_url, lat, lon, hours=min(hours, 36))
        if len(prob_map) == 0:
            # Heuristic fallback from snowfall series (deterministic or estimate)
            import pandas as _pd
            x = _pd.to_datetime(df["time_utc"], utc=True, errors="coerce")
            snow_series = None
            if "snow_in" in df.columns:
                snow_series = _pd.Series(df["snow_in"].astype(float).values, index=x)
            elif "snow_est_in" in df.columns:
                snow_series = _pd.Series(df["snow_est_in"].astype(float).values, index=x)
            if snow_series is not None:
                from nbm_client import estimate_snow_exceedance_from_series
                thresholds = [0.1, 0.3, 0.5, 1.5, 2.0, 2.5, 4.0]
                # Convert deterministic 2 m temps from F to Kelvin for the estimator.
                temp_k_vals = (df["temp_F"].astype(float).values - 32.0) * 5.0/9.0 + 273.15
                prob_map = estimate_snow_exceedance_from_series(x, snow_series.values, temp_k_vals, thresholds)
        if len(prob_map) > 0:
            # NOTE(review): `x` is only bound inside the fallback branch above;
            # if prob_map came from the fetch but t_idx is empty this raises
            # NameError, which is swallowed by the except below — confirm.
            snow_prob_fig = make_snow_prob_fig(t_idx if len(t_idx)>0 else x, prob_map)
    except Exception as e:
        print(f"Probability fetch/plot error: {e}")
        snow_prob_fig = None
    # Cloud layers and precip type probabilities
    try:
        # Cloud layers are typically only on 1-hr; for long horizons, fetch 1-hr window only
        layers_url = dataset_url if hours <= 36 else get_latest_hourly_dataset_url()
        t_layers, layers = fetch_cloud_layers(layers_url, lat, lon, hours=min(hours, 36))
        if len(layers) > 0:
            import pandas as _pd
            x = _pd.to_datetime(df["time_utc"], utc=True, errors="coerce")
            total = _pd.Series(df["cloud_cover_pct"].astype(float).values, index=x) if "cloud_cover_pct" in df.columns else None
            # preserve input order
            layers_ordered = {k: layers[k] for k in layers}
            cloud_layers_fig = make_cloud_layers_fig(t_layers, layers_ordered, total)
    except Exception as e:
        print(f"Cloud layers plot error: {e}")
    try:
        # Precip type probs available on 1-hr and often on 3-hr; try fetch, else fallback heuristic
        ptype_url = dataset_url if hours <= 36 else get_latest_3hr_dataset_url()
        t_ptype, ptype = fetch_precip_type_probs(ptype_url, lat, lon, hours=hours)
        if len(ptype) == 0:
            from nbm_client import estimate_precip_type_probs_from_surface
            t_ptype, ptype = estimate_precip_type_probs_from_surface(ptype_url, lat, lon, hours=min(hours, 36))
        if len(ptype) > 0:
            precip_type_fig = make_precip_type_fig(t_ptype, ptype)
    except Exception as e:
        print(f"Precip type plot error: {e}")
    # Snow level with precip overlay
    try:
        # Snow level is usually 1-hr only; for long horizons limit to 1-hr window
        snowlvl_url = dataset_url if hours <= 36 else get_latest_hourly_dataset_url()
        t_sl, snow_kft = fetch_snow_level_kft(snowlvl_url, lat, lon, hours=min(hours, 36))
        if snow_kft is not None and len(snow_kft) > 0:
            import pandas as _pd
            x = _pd.to_datetime(df["time_utc"], utc=True, errors="coerce")
            # Compute 6h precip window from available precip
            if "precip_in" in df.columns:
                # estimate step
                step_hours = 1.0
                if len(x) > 1:
                    step_hours = max(1.0, (x[1] - x[0]).total_seconds() / 3600.0)
                # Rolling 6-hour precipitation sum (window size in timesteps).
                w6 = max(1, int(round(6.0 / step_hours)))
                p6 = _pd.Series(df["precip_in"].astype(float).values, index=x).rolling(window=w6, min_periods=1).sum()
            else:
                p6 = None
            snow_level_fig = make_snow_level_fig(t_sl, snow_kft, p6)
    except Exception as e:
        print(f"Snow level plot error: {e}")
    # Wind rose (if direction present)
    try:
        import pandas as _pd
        if "wdir_deg" in df.columns and "wind_mph" in df.columns:
            x = _pd.to_datetime(df["time_utc"], utc=True, errors="coerce")
            wdir = _pd.Series(df["wdir_deg"].astype(float).values, index=x)
            wspd = _pd.Series(df["wind_mph"].astype(float).values, index=x)
            # Render per-step roses as small multiples
            wind_rose_fig = make_wind_rose_grid(x, wdir, wspd, step_hours=3.0 if hours>36 else 1.0)
    except Exception as e:
        print(f"Wind rose plot error: {e}")
    # Deterministic snowfall derivations if available
    snow6_fig = None
    snow24_fig = None
    snow48_fig = None
    # NOTE(review): snow72_fig is computed below but never included in the
    # final yield (only snow6/24/48 are) — confirm whether a 72-hr output
    # slot was intended.
    snow72_fig = None
    try:
        import pandas as _pd
        x = _pd.to_datetime(df["time_utc"], utc=True, errors="coerce")
        # Estimate step hours from time axis
        if len(x) > 1:
            step_hours = max(1.0, (x[1] - x[0]).total_seconds() / 3600.0)
        else:
            step_hours = 1.0
        # Prefer deterministic snowfall; else fallback estimate. Plot even if mostly zeros (visible baseline)
        snow_series = None
        if "snow_in" in df.columns:
            snow_series = _pd.Series(df["snow_in"].astype(float).values, index=x)
        elif "snow_est_in" in df.columns:
            snow_series = _pd.Series(df["snow_est_in"].astype(float).values, index=x)
        if snow_series is not None and not snow_series.isna().all():
            w6 = max(1, int(round(6.0 / step_hours)))
            snow6 = snow_series.rolling(window=w6, min_periods=1).sum()
            accum = snow6.cumsum()
            snow6_fig = make_snow_6h_accum_fig(x, snow6, accum)
            # Rolling 24/48/72-hour snowfall totals.
            for win in (24, 48, 72):
                w = max(1, int(round(win / step_hours)))
                s = snow_series.rolling(window=w, min_periods=1).sum()
                fig = make_window_snow_fig(x, s, f"{win} hr")
                if win == 24:
                    snow24_fig = fig
                elif win == 48:
                    snow48_fig = fig
                else:
                    snow72_fig = fig
    except Exception as e:
        print(f"Snow deterministic plot error: {e}")
    # Final yield: full table plus all figures and a summary header.
    table_df = df
    header = (
        f"NBM hourly forecast (next {len(df)} hrs) at "
        f"{meta['lat']:.3f}, {meta['lon']:.3f} (grid: lat[{meta['ilat']}], lon[{meta['ilon']}])\n"
        f"Dataset: {dataset_url} | total time {time.perf_counter()-t0:.1f}s"
    )
    yield (
        gr.update(value=header),
        table_df,
        temp_wind_fig,
        cloud_precip_fig,
        snow_prob_fig,
        snow6_fig,
        snow24_fig,
        snow48_fig,
        cloud_layers_fig,
        precip_type_fig,
        snow_level_fig,
        wind_rose_fig,
    )
    return
# Application UI: two tabs — live NOMADS point forecasts and an emulation
# of the NBM 1D Viewer built from its CSV archive.
with gr.Blocks(title="NBM Point Forecast (NOAA NOMADS)") as demo:
    gr.Markdown("# NBM Point Forecast (NOAA NOMADS)")
    gr.Markdown(INTRO)
    with gr.Tabs():
        with gr.TabItem("NOMADS (1-hr/3-hr)"):
            with gr.Row():
                with gr.Column(scale=3):
                    # Leaflet map embedded via HTML; clicks update lat/lon inputs below.
                    map_html = gr.HTML(
                        value="""
<div id=\"leaflet_map\" style=\"height:520px;border:1px solid #ccc;\"></div>
<link rel=\"stylesheet\" href=\"https://unpkg.com/leaflet@1.9.4/dist/leaflet.css\" crossorigin=\"\" />
<script src=\"https://unpkg.com/leaflet@1.9.4/dist/leaflet.js\" crossorigin=\"\"></script>
<script>
(function(){
const root = document.getElementById('leaflet_map');
if (!root) return;
if (root._leaflet_map_initialized) return;
root._leaflet_map_initialized = true;
const map = L.map(root).setView([39.0, -98.0], 4);
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxZoom: 19,
attribution: '&copy; OpenStreetMap contributors'
}).addTo(map);
let marker;
function setInputs(lat, lon){
const app = window.gradioApp ? window.gradioApp() : document;
const latBox = app.querySelector('#lat_input input');
const lonBox = app.querySelector('#lon_input input');
if (latBox){ latBox.value = lat.toFixed(5); latBox.dispatchEvent(new Event('input', {bubbles:true})); }
if (lonBox){ lonBox.value = lon.toFixed(5); lonBox.dispatchEvent(new Event('input', {bubbles:true})); }
}
map.on('click', function(e){
const lat = e.latlng.lat; const lon = e.latlng.lng;
if (marker) map.removeLayer(marker);
marker = L.marker([lat, lon]).addTo(map);
setInputs(lat, lon);
});
})();
</script>
""",
                        label="Map (click to set point)",
                    )
                    # elem_id values must match the selectors used by the
                    # map's setInputs() JS above.
                    lat_in = gr.Number(label="Latitude", value=None, elem_id="lat_input")
                    lon_in = gr.Number(label="Longitude", value=None, elem_id="lon_input")
                    hours = gr.Slider(
                        minimum=6,
                        maximum=240,
                        value=24,
                        step=3,
                        label="Hours to fetch (1-hr <=36h, 3-hr beyond)",
                    )
                    btn = gr.Button("Fetch NBM Forecast")
                with gr.Column(scale=5):
                    status = gr.Textbox(
                        label="Status",
                        value="Ready",
                        interactive=False,
                    )
                    table = gr.Dataframe(
                        headers=[
                            "time_utc",
                            "temp_F",
                            "dewpoint_F",
                            "wind_mph",
                            "gust_mph",
                            "cloud_cover_pct",
                            "precip_in",
                        ],
                        label="NBM hourly forecast",
                        wrap=True,
                        row_count=(0, "dynamic"),
                    )
                    temp_wind_plot = gr.Plot(label="Temp/Dewpoint/Wind")
                    cloud_precip_plot = gr.Plot(label="Clouds and Precip")
                    snow_prob_plot = gr.Plot(label="Snow Probabilities (exceedance)")
                    snow6_plot = gr.Plot(label="6 hr Snow + Accum")
                    snow24_plot = gr.Plot(label="24 hr Snowfall")
                    snow48_plot = gr.Plot(label="48 hr Snowfall")
                    cloud_layers_plot = gr.Plot(label="Cloud Layers (%)")
                    precip_type_plot = gr.Plot(label="Precip Type Probabilities")
                    snow_level_plot = gr.Plot(label="Snow Level + Precip")
                    wind_rose_plot = gr.Plot(label="Wind Rose (10 m)")
            # Triggers for NOMADS tab
            # run_forecast is a generator; each yield must match this
            # 12-element outputs list.
            btn.click(
                run_forecast,
                inputs=[lat_in, lon_in, hours],
                outputs=[status, table, temp_wind_plot, cloud_precip_plot, snow_prob_plot, snow6_plot, snow24_plot, snow48_plot, cloud_layers_plot, precip_type_plot, snow_level_plot, wind_rose_plot],
            )
            lat_in.change(
                run_forecast,
                inputs=[lat_in, lon_in, hours],
                outputs=[status, table, temp_wind_plot, cloud_precip_plot, snow_prob_plot, snow6_plot, snow24_plot, snow48_plot, cloud_layers_plot, precip_type_plot, snow_level_plot, wind_rose_plot],
            )
            lon_in.change(
                run_forecast,
                inputs=[lat_in, lon_in, hours],
                outputs=[status, table, temp_wind_plot, cloud_precip_plot, snow_prob_plot, snow6_plot, snow24_plot, snow48_plot, cloud_layers_plot, precip_type_plot, snow_level_plot, wind_rose_plot],
            )
        with gr.TabItem("NBM Viewer (CSV)"):
            gr.Markdown("Emulate the NBM 1D Viewer using its CSV archive: box/whisker and probability charts.")
            with gr.Row():
                with gr.Column(scale=3):
                    # Cascading selectors: year -> month -> day -> version -> hour -> location.
                    year = gr.Dropdown(label="Year", choices=[], value=None)
                    month = gr.Dropdown(label="Month", choices=[], value=None)
                    day = gr.Dropdown(label="Day", choices=[], value=None)
                    version = gr.Dropdown(label="Version", choices=[], value=None)
                    hour = gr.Dropdown(label="Hour (UTC)", choices=[], value=None)
                    location = gr.Dropdown(label="Location (NBM Viewer)", choices=[], value=None)
                    load_btn = gr.Button("Load Viewer CSV")
                with gr.Column(scale=5):
                    viewer_status = gr.Textbox(label="Status", value="Ready", interactive=False)
                    maxmin_fig = gr.Plot(label="Max/Min T Percentiles")
                    with gr.Row():
                        prob_field = gr.Textbox(label="Prob Field (e.g., TMP_Max_2 m above ground)", value="TMP_Max_2 m above ground")
                        prob_op = gr.Radio(label="Operator", choices=[">=","<="], value=">=")
                        prob_value = gr.Number(label="Threshold (F)", value=40)
                    make_prob = gr.Button("Compute Probability")
                    prob_fig = gr.Plot(label="Probability of Exceedance")
            # Populate cascading date selectors
            def _init_years():
                """Load the available archive years; returns (year update, status text)."""
                try:
                    ys = list_years()
                except Exception as e:
                    # Log the failure instead of silently swallowing it.
                    print(f"Failed to list years: {e}")
                    ys = []
                return gr.update(choices=ys, value=(ys[-1] if ys else None)), "Years loaded." if ys else "No years."
            def _on_year(y):
                # Refresh months for the chosen year; default to the latest.
                ms = list_months(y) if y else []
                return gr.update(choices=ms, value=(ms[-1] if ms else None))
            def _on_month(y, m):
                # Refresh days; default to the latest.
                ds = list_days(y, m) if (y and m) else []
                return gr.update(choices=ds, value=(ds[-1] if ds else None))
            def _on_day(y, m, d):
                # Refresh dataset versions; default to the first listed.
                vs = list_versions(y, m, d) if (y and m and d) else []
                return gr.update(choices=vs, value=(vs[0] if vs else None))
            def _on_version(y, m, d, v):
                hs = list_hours(y, m, d, v) if (y and m and d and v) else []
                # Use latest available hour by default
                return gr.update(choices=hs, value=(hs[-1] if hs else None))
            def _on_hour(y, m, d, v, h):
                """Refresh locations; preselect a known mountainous site if present."""
                locs = list_locations(y, m, d, v, h) if (y and m and d and v and h) else []
                # Keep list manageable
                # Preselect a common mountainous example if present
                sel = None
                for cand in ("Bridgers", "Bridger", "Alta", "Aspen Highland Peak"):
                    if cand in locs:
                        sel = cand
                        break
                if not sel and locs:
                    sel = locs[0]
                return gr.update(choices=locs, value=sel)
            def _load_csv(y, m, d, v, h, loc):
                """Fetch one location CSV and build the percentile figure.

                Always returns a 3-tuple (status, figure-or-None, df-or-None)
                so callers can unpack it uniformly.
                """
                try:
                    df = fetch_location_csv(y, m, d, v, h, loc)
                except Exception as e:
                    # BUG FIX: previously returned a 2-tuple here, which made
                    # _load_and_store's 3-way unpacking raise ValueError on
                    # any fetch failure.
                    return (f"Failed to load CSV: {e}", None, None)
                try:
                    fig = make_temp_maxmin_percentile_figure(df)
                    return (f"Loaded {loc}.csv at {y}/{m}/{d} {v} {h}Z", fig, df)
                except Exception as e:
                    return (f"Loaded CSV but plot failed: {e}", None, df)
            def _make_prob(df: pd.DataFrame, field: str, op: str, val: float):
                """Compute and plot a probability-of-exceedance series from the loaded CSV."""
                if df is None or len(df) == 0:
                    return "Load CSV first.", None
                poe = prob_exceed_series(df, field=field, operator=("<=" if op == "<=" else ">="), threshold_value=float(val), units='F')
                fig = make_prob_exceed_figure(poe.index, poe, title=f"Prob {field} {op} {val}")
                return "OK", fig
            # Initialize years and status on app load
            demo.load(_init_years, inputs=None, outputs=[year, viewer_status])
            year.change(_on_year, inputs=[year], outputs=[month])
            month.change(_on_month, inputs=[year, month], outputs=[day])
            day.change(_on_day, inputs=[year, month, day], outputs=[version])
            version.change(_on_version, inputs=[year, month, day, version], outputs=[hour])
            hour.change(_on_hour, inputs=[year, month, day, version, hour], outputs=[location])
            # Hold the loaded DataFrame between the Load and Compute buttons.
            csv_state = gr.State(value=None)
            def _load_and_store(y, m, d, v, h, loc):
                # Thin wrapper so the DataFrame lands in csv_state alongside the plot.
                status, fig, df = _load_csv(y, m, d, v, h, loc)
                return status, fig, df
            load_btn.click(_load_and_store, inputs=[year, month, day, version, hour, location], outputs=[viewer_status, maxmin_fig, csv_state])
            make_prob.click(_make_prob, inputs=[csv_state, prob_field, prob_op, prob_value], outputs=[viewer_status, prob_fig])
if __name__ == "__main__":
    # Serve on all interfaces; port comes from $PORT (default 7860, the
    # standard Hugging Face Spaces port).
    port = int(os.getenv("PORT", "7860"))
    demo.launch(server_name="0.0.0.0", server_port=port)