Update src/streamlit_app.py
Browse files- src/streamlit_app.py +49 -21
src/streamlit_app.py
CHANGED
|
@@ -831,31 +831,59 @@ with tabs[4]:
|
|
| 831 |
|
| 832 |
|
| 833 |
# --- Call HF Llama-3-70B-Instruct API for summary ---
|
| 834 |
-
|
| 835 |
-
|
|
|
|
|
|
|
| 836 |
if not HF_TOKEN:
|
| 837 |
-
st.error("HF_TOKEN not detected. Check the Secrets tab in your Space settings.")
|
| 838 |
else:
|
| 839 |
-
|
| 840 |
-
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Llama-3-70B-Instruct"
|
| 841 |
headers = {"Authorization": f"Bearer {HF_TOKEN}"}
|
| 842 |
-
|
| 843 |
-
|
| 844 |
-
|
| 845 |
-
|
| 846 |
-
|
| 847 |
-
|
| 848 |
-
|
| 849 |
-
|
| 850 |
-
|
| 851 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 852 |
resp = requests.post(API_URL, headers=headers, json=payload, timeout=90)
|
| 853 |
-
|
| 854 |
-
|
| 855 |
-
|
| 856 |
-
|
| 857 |
-
|
| 858 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 859 |
|
| 860 |
# ----- Business Impact tab
|
| 861 |
with tabs[5]:
|
|
|
|
# --- Call HF Llama-3-8B-Instruct API for an operator summary (robust + debug-safe) ---
# Sends the SHAP-derived recommendations to the Hugging Face Inference API and
# renders a three-line operator advisory in this tab.
import requests
import textwrap

# HF_TOKEN is injected via the Secrets tab on Hugging Face Spaces.
HF_TOKEN = os.getenv("HF_TOKEN")
if not HF_TOKEN:
    st.error("HF_TOKEN not detected. Check the Secrets tab in your Space settings.")
else:
    # NOTE(review): the official repo id is "meta-llama/Meta-Llama-3-8B-Instruct";
    # confirm this URL resolves before relying on it.
    API_URL = "https://api-inference.huggingface.co/models/meta-llama/Llama-3-8B-Instruct"
    headers = {"Authorization": f"Bearer {HF_TOKEN}"}

    # Prompt assembled from values computed earlier in this tab
    # (recommendations / target / use_case are defined upstream in the script).
    prompt = textwrap.dedent(f"""
        You are an expert metallurgical process advisor.
        Based on these SHAP-derived recommendations:
        {recommendations}
        Target: {target}
        Use case: {use_case}
        Summarize in three concise, professional lines what the operator should do this shift.
    """)

    payload = {
        "inputs": prompt,
        "parameters": {"max_new_tokens": 150, "temperature": 0.6},
    }

    # Network failures (timeout, DNS, connection reset) must not crash the tab.
    resp = None
    with st.spinner("Generating operator note (Llama-3-8B)…"):
        try:
            resp = requests.post(API_URL, headers=headers, json=payload, timeout=90)
        except requests.RequestException as ex:
            st.warning(f"HF request failed: {ex}")

    # --- Debug section (safe: never echoes the token) ---
    data = None
    if resp is not None:
        try:
            data = resp.json()
            st.caption("Raw HF response:")
            st.json(data)
        except ValueError as ex:  # non-JSON body, e.g. an HTML error page
            st.warning(f"HF raw response parse error: {ex}")
            st.text(resp.text)

    # --- Extract generated text robustly across known HF response shapes ---
    # list-of-dicts (standard), bare dict (some pipelines), or plain string.
    text = ""
    if isinstance(data, list) and data and isinstance(data[0], dict):
        text = str(data[0].get("generated_text", "")).strip()
    elif isinstance(data, dict):
        if "error" in data:
            # Inference API reports model-loading / auth / rate-limit problems
            # as {"error": ...} — surface that instead of a generic warning.
            st.warning(f"HF API error: {data['error']}")
        text = str(data.get("generated_text", "")).strip()
    elif isinstance(data, str):
        text = data.strip()

    if text:
        st.success("✅ Operator Advisory Generated:")
        st.info(text)
    else:
        st.warning("Operator advisory skipped: no text returned from model.")
| 887 |
|
| 888 |
# ----- Business Impact tab
|
| 889 |
with tabs[5]:
|