# Frontend / phase / Student_view / chatbot.py
# lanna_lalala
# update lesson.py - alanna
# b58ee05
# phase/Student_view/chatbot.py
import streamlit as st
import datetime, os, traceback
from huggingface_hub import InferenceClient
# --- Hugging Face configuration ---------------------------------------
# HF_TOKEN must be supplied via the environment (a Space "Secret");
# without it the inference client is never created, and `_reply_with_hf`
# detects that by checking for the `client` global.
HF_TOKEN = os.getenv("HF_TOKEN")
GEN_MODEL = os.getenv("GEN_MODEL", "TinyLlama/TinyLlama-1.1B-Chat-v1.0") # <- default TinyLlama
if not HF_TOKEN:
    st.error("⚠️ HF_TOKEN is not set. In your Space, add a Secret named HF_TOKEN.")
else:
    # Single module-level client shared by all calls; 60s timeout per request.
    client = InferenceClient(model=GEN_MODEL, token=HF_TOKEN, timeout=60)
# System prompt that frames every model call (chat and text-generation paths).
TUTOR_PROMPT = (
    "You are a kind Jamaican primary-school finance tutor. "
    "Keep answers short, friendly, and age-appropriate. "
    "Teach step-by-step with tiny examples. Avoid giving personal financial advice."
)
# -------------------------------
# History helpers
# -------------------------------
def _format_history_for_flan(messages: list[dict]) -> str:
"""Format history for text-generation style models."""
lines = []
for m in messages:
txt = (m.get("text") or "").strip()
if not txt:
continue
lines.append(("Tutor" if m.get("sender") == "assistant" else "User") + f": {txt}")
return "\n".join(lines)
def _history_as_chat_messages(messages: list[dict]) -> list[dict]:
    """Convert session history into chat-completion messages.

    The tutor system prompt always comes first; empty/missing texts are
    dropped; senders other than "assistant" are treated as the user.
    """
    history = [
        {
            "role": "assistant" if m.get("sender") == "assistant" else "user",
            "content": (m.get("text") or "").strip(),
        }
        for m in messages
        if (m.get("text") or "").strip()
    ]
    return [{"role": "system", "content": TUTOR_PROMPT}] + history
def _extract_chat_text(chat_resp) -> str:
"""Extract text from HF chat response."""
try:
return chat_resp.choices[0].message["content"] if isinstance(
chat_resp.choices[0].message, dict
) else chat_resp.choices[0].message.content
except Exception:
try:
return chat_resp["choices"][0]["message"]["content"]
except Exception:
return str(chat_resp)
# -------------------------------
# Reply logic
# -------------------------------
def _reply_with_hf():
    """Generate a tutor reply from the session chat history via the HF API.

    Prefers the chat-completions API; falls back to plain text-generation
    when the hosted model only supports that task.

    Returns:
        str: the assistant's reply text, stripped of surrounding whitespace.

    Raises:
        RuntimeError: if the HF client was never initialized (HF_TOKEN
            missing) or the API call fails for any other reason.
    """
    # `client` is only created at import time when HF_TOKEN is set.
    if "client" not in globals():
        raise RuntimeError("HF client not initialized")
    try:
        # 1) Prefer the chat API.
        msgs = _history_as_chat_messages(st.session_state.get("messages", []))
        chat = client.chat.completions.create(
            model=GEN_MODEL,
            messages=msgs,
            max_tokens=300,   # give enough room for a full answer
            temperature=0.2,  # keep replies focused and consistent
            top_p=0.9,
        )
        return _extract_chat_text(chat).strip()
    except ValueError as ve:
        # 2) Fallback to text-generation if chat is unsupported for this model.
        if "Supported task: text-generation" in str(ve):
            convo = _format_history_for_flan(st.session_state.get("messages", []))
            tg_prompt = f"{TUTOR_PROMPT}\n\n{convo}\n\nTutor:"
            resp = client.text_generation(
                tg_prompt,
                max_new_tokens=300,
                temperature=0.2,
                top_p=0.9,
                repetition_penalty=1.1,
                # BUG FIX: was True, which echoed the whole prompt (system
                # text + conversation) back to the student. False returns
                # only the newly generated reply.
                return_full_text=False,
                stream=False,
            )
            return (resp.get("generated_text") if isinstance(resp, dict) else resp).strip()
        raise  # rethrow anything else
    except Exception as e:
        err_text = ''.join(traceback.format_exception_only(type(e), e)).strip()
        # Chain the original exception so the root cause stays debuggable.
        raise RuntimeError(f"Hugging Face API Error: {err_text}") from e
# -------------------------------
# Session message helper
# -------------------------------
def add_message(text: str, sender: str):
    """Append one chat message to the Streamlit session history.

    Creates the `messages` list on first use. The entry id is the
    current epoch timestamp rendered as a string.
    """
    if "messages" not in st.session_state:
        st.session_state.messages = []
    entry = {
        "id": str(datetime.datetime.now().timestamp()),
        "text": text,
        "sender": sender,
        "timestamp": datetime.datetime.now(),
    }
    st.session_state.messages.append(entry)
def _coerce_ts(ts):
if isinstance(ts, datetime.datetime):
return ts
if isinstance(ts, (int, float)):
try:
return datetime.datetime.fromtimestamp(ts)
except Exception:
return None
if isinstance(ts, str):
# Try ISO 8601 first; fall back to float epoch
try:
return datetime.datetime.fromisoformat(ts)
except Exception:
try:
return datetime.datetime.fromtimestamp(float(ts))
except Exception:
return None
return None
def _normalize_messages():
    """Sanitize every message in the session history in place.

    Strips text, defaults the sender to "user", and coerces the
    timestamp to a datetime (falling back to "now" when unparseable).
    """
    now = datetime.datetime.now()
    st.session_state.messages = [
        {
            **m,
            "text": (m.get("text") or "").strip(),
            "sender": m.get("sender") or "user",
            "timestamp": _coerce_ts(m.get("timestamp")) or now,
        }
        for m in st.session_state.get("messages", [])
    ]
# -------------------------------
# Streamlit page
# -------------------------------
def show_page():
    """Render the AI Financial Tutor chat page (Streamlit entry point).

    Flow: seed history -> normalize -> draw bubbles -> quick-start
    buttons -> input box -> (if flagged) generate a reply and rerun.
    NOTE(review): indentation reconstructed from a flattened paste —
    confirm nesting against the original file.
    """
    st.title("🤖 AI Financial Tutor")
    st.caption("Get personalized help with your financial questions")
    # Seed the conversation with a greeting on first visit.
    if "messages" not in st.session_state:
        st.session_state.messages = [{
            "id": "1",
            "text": "Hi! I'm your AI Financial Tutor. What would you like to learn today?",
            "sender": "assistant",
            "timestamp": datetime.datetime.now()
        }]
    # `is_typing` flags that a user message is awaiting a bot reply.
    if "is_typing" not in st.session_state:
        st.session_state.is_typing = False
    # Repair malformed history entries before rendering.
    _normalize_messages()
    chat_container = st.container()
    with chat_container:
        # Each message renders as an HTML bubble: grey/left for the
        # tutor, green/right for the student.
        for msg in st.session_state.messages:
            # Guard against non-datetime timestamps that slipped past normalization.
            time_str = msg["timestamp"].strftime("%H:%M") if hasattr(msg["timestamp"], "strftime") else datetime.datetime.now().strftime("%H:%M")
            bubble = (
                f"<div style='background-color:#e0e0e0; color:black; padding:10px; border-radius:12px; max-width:70%; margin-bottom:5px;'>"
                f"{msg.get('text','')}<br><sub>{time_str}</sub></div>"
                if msg.get("sender") == "assistant" else
                f"<div style='background-color:#4CAF50; color:white; padding:10px; border-radius:12px; max-width:70%; margin-left:auto; margin-bottom:5px;'>"
                f"{msg.get('text','')}<br><sub>{time_str}</sub></div>"
            )
            st.markdown(bubble, unsafe_allow_html=True)
        if st.session_state.is_typing:
            st.markdown("🤖 _FinanceBot is typing..._")
    # Quick-start suggestions, shown only while the greeting is the sole message.
    if len(st.session_state.messages) == 1:
        st.markdown("Try asking about:")
        cols = st.columns(2)
        quick = [
            "How does compound interest work?",
            "How much should I save for emergencies?",
            "What's a good budgeting strategy?",
            "How do I start investing?"
        ]
        for i, q in enumerate(quick):
            if cols[i % 2].button(q):
                add_message(q, "user")
                st.session_state.is_typing = True
                st.rerun()  # redraw immediately with the new user message
    user_input = st.chat_input("Ask me anything about personal finance...")
    if user_input:
        add_message(user_input, "user")
        st.session_state.is_typing = True
        st.rerun()
    # On the rerun triggered above, generate the bot's reply.
    if st.session_state.is_typing:
        try:
            with st.spinner("FinanceBot is thinking..."):
                bot_reply = _reply_with_hf()
            add_message(bot_reply, "assistant")
        except Exception as e:
            # Surface failures as a chat message rather than crashing the page.
            add_message(f"⚠️ Error: {e}", "assistant")
        finally:
            st.session_state.is_typing = False
            st.rerun()  # redraw with the reply (or error) appended
    if st.button("Back to Dashboard", key="ai_tutor_back_btn"):
        st.session_state.current_page = "Student Dashboard"
        st.rerun()