File size: 4,917 Bytes
33e9ebc
27e2531
 
 
 
 
7f639f7
 
 
 
27e2531
 
 
 
 
d20b4bb
27e2531
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33e9ebc
27e2531
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
33e9ebc
 
27e2531
 
 
8a78ecf
 
60ba620
8a78ecf
60ba620
8a78ecf
 
 
27e2531
33e9ebc
27e2531
 
 
 
 
 
 
 
 
 
 
 
 
 
032b727
 
 
 
e161377
032b727
a8513ba
27e2531
a8513ba
27e2531
a8513ba
 
27e2531
a8513ba
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7d95bfd
a8513ba
7d95bfd
 
 
 
a8513ba
 
7d95bfd
 
a8513ba
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
883d61b
27e2531
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
import os
import streamlit as st
import time
from metrics_tracker import MetricsTracker
from tool_handler import run_tool  # Use PySpur-integrated run_tool function

# Disable Chroma / PostHog telemetry before any Chroma client is created,
# so no usage data leaves the machine.
os.environ["ANONYMIZED_TELEMETRY"] = "FALSE"
os.environ["CHROMA_TELEMETRY"] = "FALSE"

# Load vector DB from ZIP on startup.
# NOTE(review): imported purely for its side effects — presumably `embedding`
# unpacks/initializes the ChromaDB store at import time; confirm in embedding.py.
import embedding  

# Set page config
# Page-level configuration; Streamlit requires this to be the first st.* call.
st.set_page_config(
    page_icon="\U0001F3E5",
    page_title="❤️ HeartBot AI",
    initial_sidebar_state="expanded",
    layout="wide",
)

# Initialize session state
# Seed session state on first run; factories are only invoked when the key
# is absent, so existing chat history / metrics survive Streamlit reruns.
for _key, _factory in (("chat_history", list), ("metrics", MetricsTracker)):
    if _key not in st.session_state:
        st.session_state[_key] = _factory()

# UI layout
# Main two-column layout: settings/features on the left, chat on the right.
with st.container():
    left_col, right_col = st.columns([1, 2], gap="large")

    with left_col:
        st.markdown("## ⚙️ Settings")
        # Model choice is persisted across reruns via the widget key.
        selected_model = st.radio(
            "Select Model",
            ["llama-3.3-70b-versatile", "openai/gpt-oss-120b"],
            key="model_selector",
        )
        rag_toggle = st.toggle("Enable RAG", value=True, key="rag_toggle")
        # Console prints double as a lightweight server-side log of UI state.
        print(f"Selected Model: {selected_model}")
        print(f"RAG Enabled: {rag_toggle}")

        st.markdown("---")

        st.markdown("## 💡 Key Features")
        st.markdown(
            """
        - **BioBERT for Embeddings**
        - **Prebuilt ChromaDB Vector Store**
        - **PySpur Tools Available:**
            - Treatment Recommender
            - Symptom Cause Analyzer
            - Clinical Trial Matcher
            - Chat Memory Symptom Reasoner
        """
        )

    with right_col:
        # Centered header; HTML is required for the inline styling, hence
        # unsafe_allow_html.
        st.markdown(
            """
            <div style='text-align: center; padding-top: 10px;'>
                <h2>❤️ HeartBot AI</h2>
                <h5 style='color: #666; margin-top: 10px;'>
                    RAG-based LLM Chatbot for Myocardial Infarction and Heart Failure Clinical Decision Support
                </h5>
            </div>
            <br><br>
            """,
            unsafe_allow_html=True,
        )

        chat_container = st.container()
        user_query = None

        # Replay the full conversation on every rerun; an entry may lack a
        # "response" key if it was appended but the answer hasn't landed yet.
        for entry in st.session_state.chat_history:
            with st.chat_message("user", avatar="🧑"):
                st.markdown(entry["user"])
            if "response" in entry:
                with st.chat_message("assistant", avatar="🤖"):
                    st.markdown(entry["response"])

        user_query = st.chat_input("Ask a question...")

        # Display metrics
        # NOTE: rendered before the new query is processed, so the numbers
        # shown reflect state prior to this query; st.rerun() below refreshes.
        st.markdown("### 📊 Metrics Summary")
        metrics = st.session_state.metrics
        st.metric("Total Queries", metrics.total_queries)
        st.metric("Successful Routed Queries", metrics.successful_routings)
        st.metric("Average Response Time (s)", f"{metrics.avg_response_time():.2f}")

        if user_query:

            start_time = time.time()
            print(f"[QUERY] {user_query}")

            # Append the user turn first so the query is preserved in history
            # even if tool execution fails below.
            st.session_state.chat_history.append({"user": user_query})

            try:
                # run_tool routes the query to a PySpur tool and returns
                # (response_text, tool_name) — assumed contract; confirm in
                # tool_handler.py.
                response, tool_used = run_tool(
                    user_query,
                    model=selected_model,
                    use_rag=rag_toggle
                )
                routed_correctly = True
            except Exception as e:
                # Surface the failure in-chat rather than crashing the app.
                response = f"Error handling query: {str(e)}"
                tool_used = "ERROR"
                routed_correctly = False

            end_time = time.time()
            response_time = end_time - start_time

            # metrics logging

            st.session_state.metrics.record_query(
                query=user_query,
                model=selected_model,
                use_rag=rag_toggle,
                tool_name=tool_used,
                routed_correctly=routed_correctly,
                response_time=response_time
        )


            # structured routing log
            print(
                    f"""
            === ROUTING EVENT ===
            Query: {user_query}
            Model: {selected_model}
            RAG Enabled: {rag_toggle}
            Tool Used: {tool_used}
            Response Time: {response_time:.2f}s
            Status: {'SUCCESS' if routed_correctly else 'FAILED'}
            ====================
            """
                )

            # Attach the answer to the last history entry, then rerun so the
            # history loop above renders it and the metrics refresh.
            st.session_state.chat_history[-1]["response"] = response
            st.rerun()


        # Clear button
        if st.button("Clear Chat History"):
            st.session_state.chat_history = []
            print("Chat history cleared by the user.")
            st.session_state.metrics = MetricsTracker()
            st.rerun()

# Sidebar styling
# Inject a thin divider between the sidebar and the main content area.
_SIDEBAR_CSS = """
<style>
    section[data-testid="stSidebar"] > div:first-child {
        border-right: 2px solid #ddd;
        padding-right: 1rem;
    }
</style>
"""
st.markdown(_SIDEBAR_CSS, unsafe_allow_html=True)