Update src/streamlit_app.py
Browse files- src/streamlit_app.py +37 -4
src/streamlit_app.py
CHANGED
|
@@ -655,6 +655,13 @@ with tabs[4]:
|
|
| 655 |
else:
|
| 656 |
recs.append(f"`{r['Feature']}` neutral for `{target}`")
|
| 657 |
st.write("\n".join(recs))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 658 |
|
| 659 |
# --- Hugging Face Router Chat API (OpenAI-Compatible Format) ---
|
| 660 |
import requests, textwrap
|
|
@@ -766,6 +773,15 @@ with tabs[8]:
|
|
| 766 |
# ----- Smart Advisor tab (Role-based Insights)
|
| 767 |
with tabs[9]:
|
| 768 |
st.subheader(" Smart Advisor – Role-Based Insights")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 769 |
|
| 770 |
# -------------------------
|
| 771 |
# 1. Role hierarchy and descriptions
|
|
@@ -891,10 +907,12 @@ with tabs[9]:
|
|
| 891 |
role = st.selectbox("Select Your Role", list(roles.keys()), index=10)
|
| 892 |
st.caption(f" Context: {roles[role]}")
|
| 893 |
|
| 894 |
-
if
|
| 895 |
st.warning("Please run the AutoML + SHAP step first to generate recommendations.")
|
| 896 |
else:
|
| 897 |
-
|
|
|
|
|
|
|
| 898 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 899 |
if not HF_TOKEN:
|
| 900 |
st.error("HF_TOKEN not found. Please set it as an environment variable or in secrets.toml.")
|
|
@@ -949,7 +967,7 @@ with tabs[9]:
|
|
| 949 |
}
|
| 950 |
|
| 951 |
with st.spinner(f"Generating role-based advisory for {role}..."):
|
| 952 |
-
resp = requests.post(API_URL, headers=headers, json=payload, timeout=
|
| 953 |
if resp.status_code == 200:
|
| 954 |
data = resp.json()
|
| 955 |
msg = (
|
|
@@ -961,6 +979,12 @@ with tabs[9]:
|
|
| 961 |
if msg:
|
| 962 |
st.markdown(f"### Advisory for {role}")
|
| 963 |
st.info(msg)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 964 |
# ---- Dynamic Data-Driven Highlights ----
|
| 965 |
if role in ["Chief General Manager – PP&C", "Plant Head", "Process Optimization Head (PP&C)"]:
|
| 966 |
st.markdown("#### π Shift Highlights – Data-Driven Summary")
|
|
@@ -1018,7 +1042,7 @@ with tabs[9]:
|
|
| 1018 |
|
| 1019 |
# 6️⃣ Optional: Link to SHAP recs for validation
|
| 1020 |
if isinstance(recs, list) and recs:
|
| 1021 |
-
st.markdown("####
|
| 1022 |
matches = [r for r in recs if any(k in r for k in ["furnace", "energy", "yield", "slag", "power"])]
|
| 1023 |
if matches:
|
| 1024 |
st.info("Aligned SHAP Recommendations:\n\n- " + "\n- ".join(matches))
|
|
@@ -1031,6 +1055,15 @@ with tabs[9]:
|
|
| 1031 |
st.warning(f"Empty response.\nRaw: {data}")
|
| 1032 |
else:
|
| 1033 |
st.error(f"HF API error {resp.status_code}: {resp.text}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1034 |
|
| 1035 |
# -------------------------
|
| 1036 |
# 4. Optional role-based KPIs
|
|
|
|
| 655 |
else:
|
| 656 |
recs.append(f"`{r['Feature']}` neutral for `{target}`")
|
| 657 |
st.write("\n".join(recs))
|
| 658 |
+
# --- Persist key results for Smart Advisor tab ---
|
| 659 |
+
st.session_state["recs"] = recs
|
| 660 |
+
st.session_state["final_r2"] = final_r2
|
| 661 |
+
st.session_state["use_case"] = use_case
|
| 662 |
+
st.session_state["target"] = target
|
| 663 |
+
st.session_state["last_automl_ts"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
| 664 |
+
|
| 665 |
|
| 666 |
# --- Hugging Face Router Chat API (OpenAI-Compatible Format) ---
|
| 667 |
import requests, textwrap
|
|
|
|
| 773 |
# ----- Smart Advisor tab (Role-based Insights)
|
| 774 |
with tabs[9]:
|
| 775 |
st.subheader(" Smart Advisor – Role-Based Insights")
|
| 776 |
+
if "last_automl_ts" in st.session_state:
|
| 777 |
+
st.caption(f" Model baseline last trained: {st.session_state['last_automl_ts']}")
|
| 778 |
+
|
| 779 |
+
# --- Load persisted results from AutoML tab ---
|
| 780 |
+
recs = st.session_state.get("recs", [])
|
| 781 |
+
final_r2 = st.session_state.get("final_r2", 0)
|
| 782 |
+
use_case = st.session_state.get("use_case", "N/A")
|
| 783 |
+
target = st.session_state.get("target", "N/A")
|
| 784 |
+
|
| 785 |
|
| 786 |
# -------------------------
|
| 787 |
# 1. Role hierarchy and descriptions
|
|
|
|
| 907 |
role = st.selectbox("Select Your Role", list(roles.keys()), index=10)
|
| 908 |
st.caption(f" Context: {roles[role]}")
|
| 909 |
|
| 910 |
+
if not recs:
|
| 911 |
st.warning("Please run the AutoML + SHAP step first to generate recommendations.")
|
| 912 |
else:
|
| 913 |
+
generate_clicked = st.button("Generate Role-Based Advisory")
|
| 914 |
+
if generate_clicked and not st.session_state.get("hf_ran_once", False):
|
| 915 |
+
st.session_state["hf_ran_once"] = True
|
| 916 |
HF_TOKEN = os.getenv("HF_TOKEN")
|
| 917 |
if not HF_TOKEN:
|
| 918 |
st.error("HF_TOKEN not found. Please set it as an environment variable or in secrets.toml.")
|
|
|
|
| 967 |
}
|
| 968 |
|
| 969 |
with st.spinner(f"Generating role-based advisory for {role}..."):
|
| 970 |
+
resp = requests.post(API_URL, headers=headers, json=payload, timeout=120)
|
| 971 |
if resp.status_code == 200:
|
| 972 |
data = resp.json()
|
| 973 |
msg = (
|
|
|
|
| 979 |
if msg:
|
| 980 |
st.markdown(f"### Advisory for {role}")
|
| 981 |
st.info(msg)
|
| 982 |
+
st.session_state["last_advisory_msg"] = msg
|
| 983 |
+
st.session_state["last_role"] = role
|
| 984 |
+
# --- Timestamp the advisory ---
|
| 985 |
+
st.session_state["last_advisory_ts"] = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
| 986 |
+
st.caption(f"π Last updated: {st.session_state['last_advisory_ts']}")
|
| 987 |
+
|
| 988 |
# ---- Dynamic Data-Driven Highlights ----
|
| 989 |
if role in ["Chief General Manager – PP&C", "Plant Head", "Process Optimization Head (PP&C)"]:
|
| 990 |
st.markdown("#### π Shift Highlights – Data-Driven Summary")
|
|
|
|
| 1042 |
|
| 1043 |
# 6️⃣ Optional: Link to SHAP recs for validation
|
| 1044 |
if isinstance(recs, list) and recs:
|
| 1045 |
+
st.markdown("#### Cross-Verification with SHAP Insights")
|
| 1046 |
matches = [r for r in recs if any(k in r for k in ["furnace", "energy", "yield", "slag", "power"])]
|
| 1047 |
if matches:
|
| 1048 |
st.info("Aligned SHAP Recommendations:\n\n- " + "\n- ".join(matches))
|
|
|
|
| 1055 |
st.warning(f"Empty response.\nRaw: {data}")
|
| 1056 |
else:
|
| 1057 |
st.error(f"HF API error {resp.status_code}: {resp.text}")
|
| 1058 |
+
# --- Display last advisory if available ---
|
| 1059 |
+
if "last_advisory_msg" in st.session_state:
|
| 1060 |
+
st.markdown(f"### Last Advisory ({st.session_state.get('last_role', 'N/A')})")
|
| 1061 |
+
st.info(st.session_state["last_advisory_msg"])
|
| 1062 |
+
if "last_advisory_ts" in st.session_state:
|
| 1063 |
+
st.caption(f"Last updated: {st.session_state['last_advisory_ts']}")
|
| 1064 |
+
if "last_automl_ts" in st.session_state:
|
| 1065 |
+
st.caption(f"Model baseline last run at: {st.session_state['last_automl_ts']}")
|
| 1066 |
+
|
| 1067 |
|
| 1068 |
# -------------------------
|
| 1069 |
# 4. Optional role-based KPIs
|