|
|
# Standard library
import os
import time

# Third-party
import streamlit as st

# Local
from metrics_tracker import MetricsTracker
from tool_handler import run_tool

# Opt out of anonymized/Chroma telemetry BEFORE importing `embedding`,
# which presumably pulls in chromadb — the env vars must be in place
# before that import for the opt-out to take effect (TODO confirm).
os.environ["ANONYMIZED_TELEMETRY"] = "FALSE"
os.environ["CHROMA_TELEMETRY"] = "FALSE"

import embedding  # noqa: E402 — deliberately after the telemetry opt-out
|
|
|
|
|
|
|
|
# Global page chrome — must be the first Streamlit call in the script.
st.set_page_config(
    page_title="❤️ HeartBot AI",
    page_icon="\U0001F3E5",  # 🏥 hospital emoji
    layout="wide",
    initial_sidebar_state="expanded",
)
|
|
|
|
|
|
|
|
# One-time per-session state; both values survive Streamlit reruns.
_SESSION_DEFAULTS = {
    "chat_history": list,       # transcript entries: {"user": ..., "response": ...}
    "metrics": MetricsTracker,  # aggregated query/routing/latency stats
}
for _key, _factory in _SESSION_DEFAULTS.items():
    if _key not in st.session_state:
        st.session_state[_key] = _factory()
|
|
|
|
|
|
|
|
# Two-pane layout: narrow settings pane (ratio 1) and wide chat pane (ratio 2).
with st.container():
    left_col, right_col = st.columns([1, 2], gap="large")
|
|
|
|
|
with left_col:
    st.markdown("## ⚙️ Settings")

    # Model + retrieval controls; both values feed the query handler below.
    selected_model = st.radio(
        "Select Model",
        ["llama-3.3-70b-versatile", "openai/gpt-oss-120b"],
        key="model_selector",
    )
    rag_toggle = st.toggle("Enable RAG", value=True, key="rag_toggle")

    # Console diagnostics only — not rendered in the UI.
    print(f"Selected Model: {selected_model}")
    print(f"RAG Enabled: {rag_toggle}")

    st.markdown("---")

    st.markdown("## 💡 Key Features")
    st.markdown(
        """
        - **BioBERT for Embeddings**
        - **Prebuilt ChromaDB Vector Store**
        - **PySpur Tools Available:**
            - Treatment Recommender
            - Symptom Cause Analyzer
            - Clinical Trial Matcher
            - Chat Memory Symptom Reasoner
        """
    )
|
|
|
|
|
with right_col:
    # Centered page header.
    st.markdown(
        """
        <div style='text-align: center; padding-top: 10px;'>
        <h2>❤️ HeartBot AI</h2>
        </div>
        <br><br>
        """,
        unsafe_allow_html=True,
    )

    # NOTE(review): chat_container is created but never written to —
    # the messages below render directly into the column. Confirm intent.
    chat_container = st.container()
    user_query = None

    # Replay the persisted transcript on every rerun.
    for entry in st.session_state.chat_history:
        with st.chat_message("user", avatar="🧑"):
            st.markdown(entry["user"])
        # A just-submitted entry has no "response" until the handler runs.
        if "response" in entry:
            with st.chat_message("assistant", avatar="🤖"):
                st.markdown(entry["response"])

    # Yields the submitted text for one run, then None on later reruns.
    user_query = st.chat_input("Ask a question...")
|
|
|
|
|
|
|
|
# Session-wide usage statistics collected by MetricsTracker.
st.markdown("### 📊 Metrics Summary")
metrics = st.session_state.metrics
summary_rows = (
    ("Total Queries", metrics.total_queries),
    ("Successful Routed Queries", metrics.successful_routings),
    ("Average Response Time (s)", f"{metrics.avg_response_time():.2f}"),
)
for label, value in summary_rows:
    st.metric(label, value)
|
|
|
|
|
if user_query:
    # FIX: use a monotonic clock for latency measurement. time.time() is
    # wall-clock and can jump (NTP sync, DST, manual changes), which would
    # corrupt — or even negate — the recorded response time.
    start_time = time.perf_counter()
    print(f"[QUERY] {user_query}")

    # Append the user's message now; the assistant's reply is attached to
    # this same entry once available, so the replay loop can render both.
    st.session_state.chat_history.append({"user": user_query})

    try:
        # run_tool routes the query to a tool and returns the response
        # text plus the name of the tool that handled it.
        response, tool_used = run_tool(
            user_query,
            model=selected_model,
            use_rag=rag_toggle,
        )
        routed_correctly = True
    except Exception as e:
        # Top-level boundary: surface the failure in-chat instead of
        # crashing the app, and record the routing as failed.
        response = f"Error handling query: {str(e)}"
        tool_used = "ERROR"
        routed_correctly = False

    response_time = time.perf_counter() - start_time

    # Feed the outcome into the metrics summary panel.
    st.session_state.metrics.record_query(
        query=user_query,
        model=selected_model,
        use_rag=rag_toggle,
        tool_name=tool_used,
        routed_correctly=routed_correctly,
        response_time=response_time,
    )

    # Console-side routing audit trail.
    print(
        f"""
=== ROUTING EVENT ===
Query: {user_query}
Model: {selected_model}
RAG Enabled: {rag_toggle}
Tool Used: {tool_used}
Response Time: {response_time:.2f}s
Status: {'SUCCESS' if routed_correctly else 'FAILED'}
====================
"""
    )

    # Attach the reply to the entry appended above, then rerun so the
    # transcript replay picks it up.
    st.session_state.chat_history[-1]["response"] = response
    st.rerun()
|
|
|
|
|
|
|
|
|
|
|
# Reset both the transcript and the metrics, then redraw from scratch.
if st.button("Clear Chat History"):
    st.session_state.chat_history = []
    st.session_state.metrics = MetricsTracker()
    print("Chat history cleared by the user.")
    st.rerun()
|
|
|
|
|
|
|
|
# Cosmetic divider between the sidebar and the main pane.
_SIDEBAR_CSS = """
<style>
section[data-testid="stSidebar"] > div:first-child {
    border-right: 2px solid #ddd;
    padding-right: 1rem;
}
</style>
"""
st.markdown(_SIDEBAR_CSS, unsafe_allow_html=True)