"""
utils/api_client.py
--------------------
Centralized, cached API wrappers.
HF Spaces compatible: reads API_URL from environment variable so the
same code works locally (localhost:8000) and on HuggingFace (localhost:8000
since both services run in the same container).
"""
import os
import requests
import pandas as pd
import streamlit as st
# HF Spaces: backend always on localhost:8000 inside the container
API = os.environ.get("API_URL", "http://localhost:8000")
TIMEOUT = 15
@st.cache_data(ttl=300)
def _get(endpoint: str, params: dict | None = None):
    """Cached raw GET against the backend — JSON payload, or None on any error.

    Every failure mode (connection refused, timeout, non-2xx status,
    malformed JSON) deliberately collapses to None: callers treat None
    as "no data" and render an empty state instead of crashing the UI.
    """
    try:
        response = requests.get(
            f"{API}{endpoint}", params=params or {}, timeout=TIMEOUT
        )
        response.raise_for_status()
        return response.json()
    except Exception:
        # ConnectionError / Timeout / HTTPError / decode errors all land here.
        return None
def _df(data) -> pd.DataFrame:
if not data:
return pd.DataFrame()
if isinstance(data, list):
return pd.DataFrame(data)
if isinstance(data, dict):
return pd.DataFrame([data])
return pd.DataFrame()
# ── Health ──────────────────────────────────────────────────────────────────────
def is_online() -> bool:
    """Return True if the backend's /health endpoint answers successfully.

    Uses a short 5-second timeout so page rendering is not blocked while
    the backend container is still starting up.
    """
    try:
        # .ok is False for 4xx/5xx — a backend that answers /health with
        # an error status is not healthy, so don't report it as online.
        return requests.get(f"{API}/health", timeout=5).ok
    except Exception:
        return False
# ── /districts/* ────────────────────────────────────────────────────────────────
def fetch_stats() -> dict:
    """Aggregate district statistics, or {} when the API is unavailable."""
    payload = _get("/districts/stats")
    return payload if payload else {}
def fetch_states() -> list[str]:
    """All state names known to the backend ([] when unavailable)."""
    payload = _get("/districts/states")
    return payload if payload else []
def fetch_districts(state: str) -> list[str]:
    """District names within *state* ([] when unavailable)."""
    payload = _get("/districts/list", {"state": state})
    return payload if payload else []
def fetch_district_history(state: str, district: str) -> pd.DataFrame:
    """Yearly history rows for one district, as a DataFrame."""
    payload = _get(
        "/districts/history", {"state": state, "district": district}
    )
    return _df(payload)
def fetch_top_districts(
    state: str | None = None,
    metric: str = "person_days_lakhs",
    n: int = 12,
) -> pd.DataFrame:
    """Top-*n* districts ranked by *metric*, optionally limited to one state."""
    # Only send "state" when a truthy filter was given — the backend
    # treats its absence as "all states".
    query = {"metric": metric, "n": n, **({"state": state} if state else {})}
    return _df(_get("/districts/top", query))
def fetch_yearly_trend(state: str | None = None) -> pd.DataFrame:
    """National (or per-state, when *state* is given) yearly trend rows."""
    query: dict = {}
    if state:
        query["state"] = state
    return _df(_get("/districts/trend", query))
# ── /predictions/* ──────────────────────────────────────────────────────────────
def fetch_predictions(
    state: str | None = None,
    district: str | None = None,
    year: int | None = None,
) -> pd.DataFrame:
    """Model predictions, filterable by state / district / year.

    Falsy filters (None, "", 0) are omitted from the query string, so the
    backend returns the unfiltered set for those dimensions.
    """
    candidates = {"state": state, "district": district, "year": year}
    query = {key: value for key, value in candidates.items() if value}
    return _df(_get("/predictions/", query))
# ── /optimizer/* ────────────────────────────────────────────────────────────────
def fetch_optimizer_results(state: str | None = None) -> pd.DataFrame:
    """Precomputed optimizer allocations, optionally limited to one state."""
    query: dict = {}
    if state:
        query["state"] = state
    return _df(_get("/optimizer/results", query))
def run_optimizer_live(
    state: str | None = None,
    budget_scale: float = 1.0,
    min_fraction: float = 0.40,
    max_fraction: float = 2.50,
) -> dict | None:
    """POST a live optimizer run to the backend and return its JSON result.

    Deliberately NOT cached: every call triggers a fresh solve, so a
    generous 60-second timeout is used. On any failure the error is
    surfaced to the user via st.error and None is returned.
    """
    payload = {
        "state": state,
        "budget_scale": budget_scale,
        "min_fraction": min_fraction,
        "max_fraction": max_fraction,
    }
    try:
        r = requests.post(f"{API}/optimizer/run", json=payload, timeout=60)
        r.raise_for_status()
        return r.json()
    except requests.exceptions.ConnectionError:
        # Fixed mojibake ("β") in the original user-facing message.
        st.error("Cannot reach API — backend may still be starting up, refresh in a moment.")
        return None
    except requests.exceptions.Timeout:
        # A live solve can legitimately exceed 60 s; give an actionable hint
        # instead of the generic "Optimizer error: ..." text.
        st.error("Optimizer request timed out — try a narrower scope or retry in a moment.")
        return None
    except Exception as e:
        st.error(f"Optimizer error: {e}")
        return None