fix: use query params bridge for menubar→backend communication
Browse files
app.py
CHANGED
|
@@ -1,7 +1,11 @@
|
|
| 1 |
"""
|
| 2 |
-
|
| 3 |
-
=====================================
|
| 4 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 5 |
Run with: streamlit run app.py
|
| 6 |
"""
|
| 7 |
|
|
@@ -10,6 +14,8 @@ import streamlit as st
|
|
| 10 |
import numpy as np
|
| 11 |
from duckduckgo_search import DDGS
|
| 12 |
|
|
|
|
|
|
|
| 13 |
# ══════════════════════════════════════════════════════
|
| 14 |
LLM_MODEL = "llama-3.3-70b-versatile"
|
| 15 |
SIMILARITY_THRESHOLD = 0.78
|
|
@@ -18,509 +24,108 @@ WEB_MAX_RESULTS = 4
|
|
| 18 |
PINECONE_INDEX_NAME = "pa-memory"
|
| 19 |
NOTES_FILE = "pa_notes.json"
|
| 20 |
PROFILE_FILE = "pa_profile.json"
|
| 21 |
-
PA_NAME = "Aria"
|
| 22 |
PA_PERSONALITY = (
|
| 23 |
"You are Aria, a brilliant and warm personal AI assistant. "
|
| 24 |
"You are proactive, thoughtful, and genuinely care about helping. "
|
| 25 |
"You remember things about the user and reference them naturally. "
|
|
|
|
| 26 |
"You are concise but never cold. You feel like a real assistant, not a robot."
|
| 27 |
)
|
| 28 |
# ══════════════════════════════════════════════════════
|
| 29 |
|
| 30 |
-
st.set_page_config(
|
| 31 |
-
page_title="Aria — Personal AI",
|
| 32 |
-
page_icon="✦",
|
| 33 |
-
layout="wide",
|
| 34 |
-
initial_sidebar_state="collapsed"
|
| 35 |
-
)
|
| 36 |
|
| 37 |
-
# ── Premium CSS ───────────────────────────────────────
|
| 38 |
st.markdown("""
|
| 39 |
<style>
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
--surface: #141416;
|
| 48 |
-
--border: #222226;
|
| 49 |
-
--border2: #2a2a2f;
|
| 50 |
-
--text: #f0f0f2;
|
| 51 |
-
--muted: #666670;
|
| 52 |
-
--accent: #7c6fcd;
|
| 53 |
-
--accent2: #5bc4a0;
|
| 54 |
-
--glow: rgba(124, 111, 205, 0.15);
|
| 55 |
-
--radius: 16px;
|
| 56 |
-
}
|
| 57 |
-
|
| 58 |
-
/* Hide Streamlit chrome */
|
| 59 |
-
#MainMenu, footer, header, .stDeployButton,
|
| 60 |
-
[data-testid="stToolbar"], [data-testid="stDecoration"],
|
| 61 |
-
[data-testid="stStatusWidget"] { display: none !important; }
|
| 62 |
-
|
| 63 |
-
/* Hide sidebar toggle */
|
| 64 |
-
[data-testid="collapsedControl"] { display: none !important; }
|
| 65 |
-
|
| 66 |
-
/* App background */
|
| 67 |
-
.stApp {
|
| 68 |
-
background: var(--bg) !important;
|
| 69 |
-
font-family: 'Geist', sans-serif !important;
|
| 70 |
-
}
|
| 71 |
-
|
| 72 |
-
/* Grain overlay */
|
| 73 |
-
.stApp::before {
|
| 74 |
-
content: '';
|
| 75 |
-
position: fixed;
|
| 76 |
-
inset: 0;
|
| 77 |
-
background-image: url("data:image/svg+xml,%3Csvg viewBox='0 0 256 256' xmlns='http://www.w3.org/2000/svg'%3E%3Cfilter id='noise'%3E%3CfeTurbulence type='fractalNoise' baseFrequency='0.9' numOctaves='4' stitchTiles='stitch'/%3E%3C/filter%3E%3Crect width='100%25' height='100%25' filter='url(%23noise)' opacity='0.03'/%3E%3C/svg%3E");
|
| 78 |
-
pointer-events: none;
|
| 79 |
-
z-index: 0;
|
| 80 |
-
opacity: 0.4;
|
| 81 |
-
}
|
| 82 |
-
|
| 83 |
-
/* Ambient glow */
|
| 84 |
-
.stApp::after {
|
| 85 |
-
content: '';
|
| 86 |
-
position: fixed;
|
| 87 |
-
top: -200px;
|
| 88 |
-
left: 50%;
|
| 89 |
-
transform: translateX(-50%);
|
| 90 |
-
width: 600px;
|
| 91 |
-
height: 400px;
|
| 92 |
-
background: radial-gradient(ellipse, rgba(124,111,205,0.06) 0%, transparent 70%);
|
| 93 |
-
pointer-events: none;
|
| 94 |
-
z-index: 0;
|
| 95 |
-
}
|
| 96 |
-
|
| 97 |
-
/* Main container */
|
| 98 |
-
.main .block-container {
|
| 99 |
-
max-width: 780px !important;
|
| 100 |
-
margin: 0 auto !important;
|
| 101 |
-
padding: 0 24px 120px !important;
|
| 102 |
-
}
|
| 103 |
-
|
| 104 |
-
/* Header */
|
| 105 |
-
.aria-header {
|
| 106 |
-
display: flex;
|
| 107 |
-
align-items: center;
|
| 108 |
-
gap: 12px;
|
| 109 |
-
padding: 32px 0 8px;
|
| 110 |
-
margin-bottom: 8px;
|
| 111 |
-
}
|
| 112 |
-
|
| 113 |
-
.aria-logo {
|
| 114 |
-
width: 36px;
|
| 115 |
-
height: 36px;
|
| 116 |
-
background: linear-gradient(135deg, #7c6fcd, #5bc4a0);
|
| 117 |
-
border-radius: 10px;
|
| 118 |
-
display: flex;
|
| 119 |
-
align-items: center;
|
| 120 |
-
justify-content: center;
|
| 121 |
-
font-size: 16px;
|
| 122 |
-
flex-shrink: 0;
|
| 123 |
-
}
|
| 124 |
-
|
| 125 |
-
.aria-title {
|
| 126 |
-
font-family: 'Instrument Serif', serif !important;
|
| 127 |
-
font-size: 22px !important;
|
| 128 |
-
color: var(--text) !important;
|
| 129 |
-
font-weight: 400 !important;
|
| 130 |
-
letter-spacing: -0.3px;
|
| 131 |
-
}
|
| 132 |
-
|
| 133 |
-
.aria-subtitle {
|
| 134 |
-
font-size: 12px;
|
| 135 |
-
color: var(--muted);
|
| 136 |
-
letter-spacing: 0.05em;
|
| 137 |
-
}
|
| 138 |
-
|
| 139 |
-
/* Chat messages */
|
| 140 |
-
[data-testid="stChatMessage"] {
|
| 141 |
-
background: transparent !important;
|
| 142 |
-
border: none !important;
|
| 143 |
-
padding: 4px 0 !important;
|
| 144 |
-
gap: 14px !important;
|
| 145 |
-
}
|
| 146 |
-
|
| 147 |
-
/* User messages */
|
| 148 |
-
[data-testid="stChatMessage"][data-testid*="user"],
|
| 149 |
-
[data-testid="stChatMessage"]:has([data-testid="chatAvatarIcon-user"]) {
|
| 150 |
-
flex-direction: row-reverse !important;
|
| 151 |
-
}
|
| 152 |
-
|
| 153 |
-
[data-testid="stChatMessage"]:has([data-testid="chatAvatarIcon-user"]) .stMarkdown {
|
| 154 |
-
background: #1e1e28 !important;
|
| 155 |
-
border: 1px solid var(--border2) !important;
|
| 156 |
-
border-radius: 18px 4px 18px 18px !important;
|
| 157 |
-
padding: 12px 16px !important;
|
| 158 |
-
max-width: 80% !important;
|
| 159 |
-
margin-left: auto !important;
|
| 160 |
-
font-size: 15px !important;
|
| 161 |
-
color: var(--text) !important;
|
| 162 |
-
line-height: 1.6 !important;
|
| 163 |
-
}
|
| 164 |
-
|
| 165 |
-
/* Assistant messages */
|
| 166 |
-
[data-testid="stChatMessage"]:has([data-testid="chatAvatarIcon-assistant"]) .stMarkdown {
|
| 167 |
-
background: transparent !important;
|
| 168 |
-
border: none !important;
|
| 169 |
-
padding: 4px 0 !important;
|
| 170 |
-
font-size: 15px !important;
|
| 171 |
-
color: #d8d8e0 !important;
|
| 172 |
-
line-height: 1.75 !important;
|
| 173 |
-
max-width: 100% !important;
|
| 174 |
-
}
|
| 175 |
-
|
| 176 |
-
/* Avatar icons */
|
| 177 |
-
[data-testid="chatAvatarIcon-user"] {
|
| 178 |
-
background: linear-gradient(135deg, #2a2a35, #333340) !important;
|
| 179 |
-
border: 1px solid var(--border2) !important;
|
| 180 |
-
border-radius: 50% !important;
|
| 181 |
-
color: var(--muted) !important;
|
| 182 |
-
font-size: 13px !important;
|
| 183 |
-
width: 32px !important;
|
| 184 |
-
height: 32px !important;
|
| 185 |
-
flex-shrink: 0 !important;
|
| 186 |
-
}
|
| 187 |
-
|
| 188 |
-
[data-testid="chatAvatarIcon-assistant"] {
|
| 189 |
-
background: linear-gradient(135deg, #7c6fcd, #5bc4a0) !important;
|
| 190 |
-
border: none !important;
|
| 191 |
-
border-radius: 10px !important;
|
| 192 |
-
color: white !important;
|
| 193 |
-
font-size: 13px !important;
|
| 194 |
-
width: 32px !important;
|
| 195 |
-
height: 32px !important;
|
| 196 |
-
flex-shrink: 0 !important;
|
| 197 |
-
}
|
| 198 |
-
|
| 199 |
-
/* Chat input */
|
| 200 |
-
[data-testid="stChatInput"] {
|
| 201 |
-
position: fixed !important;
|
| 202 |
-
bottom: 0 !important;
|
| 203 |
-
left: 50% !important;
|
| 204 |
-
transform: translateX(-50%) !important;
|
| 205 |
-
width: 100% !important;
|
| 206 |
-
max-width: 780px !important;
|
| 207 |
-
padding: 16px 24px 24px !important;
|
| 208 |
-
background: linear-gradient(to top, var(--bg) 70%, transparent) !important;
|
| 209 |
-
z-index: 100 !important;
|
| 210 |
-
}
|
| 211 |
-
|
| 212 |
-
[data-testid="stChatInput"] textarea {
|
| 213 |
-
background: var(--surface) !important;
|
| 214 |
-
border: 1px solid var(--border2) !important;
|
| 215 |
-
border-radius: 14px !important;
|
| 216 |
-
color: var(--text) !important;
|
| 217 |
-
font-family: 'Geist', sans-serif !important;
|
| 218 |
-
font-size: 15px !important;
|
| 219 |
-
padding: 14px 18px !important;
|
| 220 |
-
resize: none !important;
|
| 221 |
-
transition: border-color 0.2s !important;
|
| 222 |
-
box-shadow: 0 0 0 0 transparent !important;
|
| 223 |
-
}
|
| 224 |
-
|
| 225 |
-
[data-testid="stChatInput"] textarea:focus {
|
| 226 |
-
border-color: var(--accent) !important;
|
| 227 |
-
box-shadow: 0 0 0 3px var(--glow) !important;
|
| 228 |
-
outline: none !important;
|
| 229 |
-
}
|
| 230 |
-
|
| 231 |
-
[data-testid="stChatInput"] textarea::placeholder {
|
| 232 |
-
color: var(--muted) !important;
|
| 233 |
-
}
|
| 234 |
-
|
| 235 |
-
/* Send button */
|
| 236 |
-
[data-testid="stChatInput"] button {
|
| 237 |
-
background: linear-gradient(135deg, #7c6fcd, #6a5fc0) !important;
|
| 238 |
-
border: none !important;
|
| 239 |
-
border-radius: 10px !important;
|
| 240 |
-
color: white !important;
|
| 241 |
-
transition: all 0.2s !important;
|
| 242 |
-
}
|
| 243 |
-
|
| 244 |
-
[data-testid="stChatInput"] button:hover {
|
| 245 |
-
background: linear-gradient(135deg, #8d81d8, #7c6fcd) !important;
|
| 246 |
-
transform: scale(1.05) !important;
|
| 247 |
-
}
|
| 248 |
-
|
| 249 |
-
/* Sidebar */
|
| 250 |
-
[data-testid="stSidebar"] {
|
| 251 |
-
background: var(--surface) !important;
|
| 252 |
-
border-right: 1px solid var(--border) !important;
|
| 253 |
-
}
|
| 254 |
-
|
| 255 |
-
[data-testid="stSidebar"] * {
|
| 256 |
-
font-family: 'Geist', sans-serif !important;
|
| 257 |
-
color: var(--text) !important;
|
| 258 |
-
}
|
| 259 |
-
|
| 260 |
-
/* Buttons */
|
| 261 |
-
.stButton button {
|
| 262 |
-
background: var(--surface) !important;
|
| 263 |
-
border: 1px solid var(--border2) !important;
|
| 264 |
-
border-radius: 10px !important;
|
| 265 |
-
color: var(--text) !important;
|
| 266 |
-
font-family: 'Geist', sans-serif !important;
|
| 267 |
-
font-size: 13px !important;
|
| 268 |
-
padding: 8px 16px !important;
|
| 269 |
-
transition: all 0.2s !important;
|
| 270 |
-
width: 100% !important;
|
| 271 |
-
}
|
| 272 |
-
|
| 273 |
-
.stButton button:hover {
|
| 274 |
-
border-color: var(--accent) !important;
|
| 275 |
-
background: var(--glow) !important;
|
| 276 |
-
}
|
| 277 |
-
|
| 278 |
-
/* Text inputs */
|
| 279 |
-
.stTextInput input, .stTextArea textarea {
|
| 280 |
-
background: #0f0f12 !important;
|
| 281 |
-
border: 1px solid var(--border2) !important;
|
| 282 |
-
border-radius: 10px !important;
|
| 283 |
-
color: var(--text) !important;
|
| 284 |
-
font-family: 'Geist', sans-serif !important;
|
| 285 |
-
font-size: 14px !important;
|
| 286 |
-
}
|
| 287 |
-
|
| 288 |
-
.stTextInput input:focus, .stTextArea textarea:focus {
|
| 289 |
-
border-color: var(--accent) !important;
|
| 290 |
-
box-shadow: 0 0 0 2px var(--glow) !important;
|
| 291 |
-
}
|
| 292 |
-
|
| 293 |
-
/* Labels */
|
| 294 |
-
.stTextInput label, .stTextArea label {
|
| 295 |
-
color: var(--muted) !important;
|
| 296 |
-
font-size: 12px !important;
|
| 297 |
-
letter-spacing: 0.06em !important;
|
| 298 |
-
text-transform: uppercase !important;
|
| 299 |
-
font-weight: 500 !important;
|
| 300 |
-
}
|
| 301 |
-
|
| 302 |
-
/* Caption / info */
|
| 303 |
-
.stCaption, [data-testid="stCaptionContainer"] {
|
| 304 |
-
color: var(--muted) !important;
|
| 305 |
-
font-size: 12px !important;
|
| 306 |
-
}
|
| 307 |
-
|
| 308 |
-
/* Spinner */
|
| 309 |
-
.stSpinner > div {
|
| 310 |
-
border-color: var(--accent) transparent transparent transparent !important;
|
| 311 |
-
}
|
| 312 |
-
|
| 313 |
-
/* Success / info */
|
| 314 |
-
.stSuccess, .stInfo {
|
| 315 |
-
background: rgba(91,196,160,0.08) !important;
|
| 316 |
-
border: 1px solid rgba(91,196,160,0.2) !important;
|
| 317 |
-
border-radius: 10px !important;
|
| 318 |
-
color: #5bc4a0 !important;
|
| 319 |
-
font-size: 13px !important;
|
| 320 |
-
}
|
| 321 |
-
|
| 322 |
-
/* Divider */
|
| 323 |
-
hr { border-color: var(--border) !important; }
|
| 324 |
-
|
| 325 |
-
/* Scrollbar */
|
| 326 |
-
::-webkit-scrollbar { width: 4px; }
|
| 327 |
-
::-webkit-scrollbar-track { background: transparent; }
|
| 328 |
-
::-webkit-scrollbar-thumb { background: var(--border2); border-radius: 4px; }
|
| 329 |
-
|
| 330 |
-
/* Suggestion chips */
|
| 331 |
-
.chip-row {
|
| 332 |
-
display: flex;
|
| 333 |
-
flex-wrap: wrap;
|
| 334 |
-
gap: 8px;
|
| 335 |
-
margin: 24px 0 16px;
|
| 336 |
-
}
|
| 337 |
-
.chip {
|
| 338 |
-
background: var(--surface);
|
| 339 |
-
border: 1px solid var(--border2);
|
| 340 |
-
border-radius: 100px;
|
| 341 |
-
padding: 8px 16px;
|
| 342 |
-
font-size: 13px;
|
| 343 |
-
color: var(--muted);
|
| 344 |
-
cursor: pointer;
|
| 345 |
-
transition: all 0.2s;
|
| 346 |
-
font-family: 'Geist', sans-serif;
|
| 347 |
-
white-space: nowrap;
|
| 348 |
-
}
|
| 349 |
-
.chip:hover {
|
| 350 |
-
border-color: var(--accent);
|
| 351 |
-
color: var(--text);
|
| 352 |
-
background: var(--glow);
|
| 353 |
-
}
|
| 354 |
-
|
| 355 |
-
/* Tool badge */
|
| 356 |
-
.tool-pill {
|
| 357 |
-
display: inline-flex;
|
| 358 |
-
align-items: center;
|
| 359 |
-
gap: 6px;
|
| 360 |
-
background: rgba(124,111,205,0.1);
|
| 361 |
-
border: 1px solid rgba(124,111,205,0.2);
|
| 362 |
-
border-radius: 100px;
|
| 363 |
-
padding: 4px 12px;
|
| 364 |
-
font-size: 12px;
|
| 365 |
-
color: #a99ee0;
|
| 366 |
-
margin-bottom: 8px;
|
| 367 |
-
font-family: 'Geist', sans-serif;
|
| 368 |
-
}
|
| 369 |
-
|
| 370 |
-
/* Memory badge */
|
| 371 |
-
.memory-pill {
|
| 372 |
-
display: inline-flex;
|
| 373 |
-
align-items: center;
|
| 374 |
-
gap: 6px;
|
| 375 |
-
background: rgba(91,196,160,0.08);
|
| 376 |
-
border: 1px solid rgba(91,196,160,0.2);
|
| 377 |
-
border-radius: 100px;
|
| 378 |
-
padding: 4px 12px;
|
| 379 |
-
font-size: 12px;
|
| 380 |
-
color: #5bc4a0;
|
| 381 |
-
margin-bottom: 8px;
|
| 382 |
-
}
|
| 383 |
-
|
| 384 |
-
/* Metric */
|
| 385 |
-
[data-testid="stMetric"] {
|
| 386 |
-
background: var(--surface) !important;
|
| 387 |
-
border: 1px solid var(--border) !important;
|
| 388 |
-
border-radius: 10px !important;
|
| 389 |
-
padding: 12px !important;
|
| 390 |
-
}
|
| 391 |
</style>
|
| 392 |
""", unsafe_allow_html=True)
|
| 393 |
|
| 394 |
-
|
| 395 |
-
st.
|
| 396 |
-
|
| 397 |
-
|
| 398 |
-
|
| 399 |
-
|
| 400 |
-
|
| 401 |
-
|
| 402 |
-
|
| 403 |
-
|
| 404 |
-
|
| 405 |
-
|
| 406 |
-
|
| 407 |
-
|
| 408 |
-
|
| 409 |
-
|
| 410 |
-
|
| 411 |
-
|
| 412 |
-
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
|
|
|
|
|
|
| 417 |
|
| 418 |
-
def
|
| 419 |
-
|
| 420 |
-
|
| 421 |
-
|
| 422 |
-
|
| 423 |
-
if "
|
| 424 |
-
|
| 425 |
-
if "your_name" not in st.session_state: st.session_state.your_name = saved.get("your_name", "Sterlin")
|
| 426 |
-
|
| 427 |
-
groq_key = st.session_state.groq_key
|
| 428 |
-
pinecone_key = st.session_state.pinecone_key
|
| 429 |
-
your_name = st.session_state.your_name
|
| 430 |
|
| 431 |
# ── Sidebar ───────────────────────────────────────────
|
| 432 |
with st.sidebar:
|
| 433 |
-
st.
|
| 434 |
-
|
| 435 |
-
|
| 436 |
-
|
| 437 |
-
st.success("🔐 Keys saved & active")
|
| 438 |
-
st.caption(f"Groq: gsk_••••••{groq_key[-4:]}")
|
| 439 |
-
st.caption(f"Pinecone: ••••••{pinecone_key[-4:]}")
|
| 440 |
-
st.caption(f"Name: {your_name}")
|
| 441 |
-
if st.button("🔓 Logout / Change Keys"):
|
| 442 |
-
st.session_state.groq_key = ""
|
| 443 |
-
st.session_state.pinecone_key = ""
|
| 444 |
-
st.session_state.your_name = "Sterlin"
|
| 445 |
-
clear_keys()
|
| 446 |
-
st.rerun()
|
| 447 |
-
else:
|
| 448 |
-
new_groq = st.text_input("Groq API Key", type="password", placeholder="gsk_...")
|
| 449 |
-
new_pinecone = st.text_input("Pinecone API Key", type="password", placeholder="xxxxxxxx-xxxx...")
|
| 450 |
-
new_name = st.text_input("Your Name", value="Sterlin")
|
| 451 |
-
if st.button("💾 Save & Connect"):
|
| 452 |
-
if new_groq and new_pinecone:
|
| 453 |
-
save_keys(new_groq, new_pinecone, new_name)
|
| 454 |
-
st.session_state.groq_key = new_groq
|
| 455 |
-
st.session_state.pinecone_key = new_pinecone
|
| 456 |
-
st.session_state.your_name = new_name
|
| 457 |
-
groq_key = new_groq
|
| 458 |
-
pinecone_key = new_pinecone
|
| 459 |
-
your_name = new_name
|
| 460 |
-
st.success("✅ Keys saved! You won't need to enter them again.")
|
| 461 |
-
st.rerun()
|
| 462 |
-
else:
|
| 463 |
-
st.error("Please enter both API keys.")
|
| 464 |
|
| 465 |
st.markdown("---")
|
| 466 |
-
st.markdown("
|
| 467 |
-
|
| 468 |
-
|
| 469 |
-
|
| 470 |
-
|
| 471 |
-
return {"name":"","about":"","facts":[]}
|
| 472 |
-
|
| 473 |
-
def save_profile(p):
|
| 474 |
-
json.dump(p, open(PROFILE_FILE,"w"), indent=2)
|
| 475 |
-
|
| 476 |
-
profile = load_profile()
|
| 477 |
-
about_you = st.text_area("About you", value=profile.get("about",""),
|
| 478 |
-
placeholder="I'm a developer from Chennai...", height=80)
|
| 479 |
if st.button("💾 Save Profile"):
|
| 480 |
profile["name"] = your_name
|
| 481 |
profile["about"] = about_you
|
| 482 |
save_profile(profile)
|
| 483 |
-
st.success("Profile saved!")
|
| 484 |
|
| 485 |
if profile.get("facts"):
|
| 486 |
-
st.markdown("**What
|
| 487 |
-
for
|
| 488 |
-
st.
|
| 489 |
|
| 490 |
st.markdown("---")
|
| 491 |
-
st.markdown("
|
| 492 |
-
|
| 493 |
-
|
| 494 |
-
|
| 495 |
-
st.caption(t)
|
| 496 |
|
| 497 |
st.markdown("---")
|
| 498 |
-
|
| 499 |
-
|
| 500 |
-
|
| 501 |
-
|
| 502 |
-
|
| 503 |
-
|
| 504 |
-
|
| 505 |
-
if
|
| 506 |
-
|
| 507 |
-
|
| 508 |
-
for f in [NOTES_FILE, PROFILE_FILE]:
|
| 509 |
-
if os.path.exists(f): os.remove(f)
|
| 510 |
-
st.rerun()
|
| 511 |
|
| 512 |
if not groq_key or not pinecone_key:
|
| 513 |
-
st.
|
| 514 |
-
<div class="chip-row">
|
| 515 |
-
<div class="chip">🌐 Search the web</div>
|
| 516 |
-
<div class="chip">📰 Get latest news</div>
|
| 517 |
-
<div class="chip">💱 Convert currency</div>
|
| 518 |
-
<div class="chip">🌤 Check weather</div>
|
| 519 |
-
<div class="chip">📝 Save notes</div>
|
| 520 |
-
<div class="chip">🧮 Do math</div>
|
| 521 |
-
</div>
|
| 522 |
-
""", unsafe_allow_html=True)
|
| 523 |
-
st.info("👈 Open the sidebar (top left ›) and enter your API keys to start chatting with Aria.")
|
| 524 |
st.stop()
|
| 525 |
|
| 526 |
# ── Load embedder ─────────────────────────────────────
|
|
@@ -531,10 +136,13 @@ def load_embedder():
|
|
| 531 |
|
| 532 |
embedder = load_embedder()
|
| 533 |
|
|
|
|
| 534 |
@st.cache_resource
|
| 535 |
def init_pinecone(_key):
|
| 536 |
from pinecone import Pinecone
|
| 537 |
-
|
|
|
|
|
|
|
| 538 |
|
| 539 |
try:
|
| 540 |
pc_index = init_pinecone(pinecone_key)
|
|
@@ -546,184 +154,192 @@ except Exception as e:
|
|
| 546 |
if "messages" not in st.session_state: st.session_state.messages = []
|
| 547 |
if "history" not in st.session_state: st.session_state.history = []
|
| 548 |
|
| 549 |
-
# ──
|
| 550 |
def load_notes():
|
| 551 |
return json.load(open(NOTES_FILE)) if os.path.exists(NOTES_FILE) else {}
|
| 552 |
|
| 553 |
def save_note(title, content):
|
| 554 |
-
notes = load_notes()
|
| 555 |
-
|
|
|
|
| 556 |
return f"✅ Note saved: **{title}**"
|
| 557 |
|
| 558 |
def get_note(title):
|
| 559 |
notes = load_notes()
|
| 560 |
for k, v in notes.items():
|
| 561 |
-
if title.lower() in k.lower():
|
|
|
|
| 562 |
return f"No note found for '{title}'"
|
| 563 |
|
| 564 |
-
|
| 565 |
-
p = load_profile()
|
| 566 |
-
parts = []
|
| 567 |
-
if p.get("name"): parts.append(f"User's name: {p['name']}")
|
| 568 |
-
if p.get("about"): parts.append(f"About: {p['about']}")
|
| 569 |
-
if p.get("facts"): parts.append("Known facts:\n" + "\n".join(f"- {f}" for f in p["facts"][-8:]))
|
| 570 |
-
return "\n".join(parts)
|
| 571 |
-
|
| 572 |
-
def update_profile(question):
|
| 573 |
-
keywords = ["i am","i'm","my name","i work","i like","i love","i live","i study","i'm from"]
|
| 574 |
-
if any(kw in question.lower() for kw in keywords):
|
| 575 |
-
p = load_profile()
|
| 576 |
-
if question[:150] not in p["facts"]:
|
| 577 |
-
p["facts"].append(question[:150])
|
| 578 |
-
p["facts"] = p["facts"][-20:]
|
| 579 |
-
save_profile(p)
|
| 580 |
-
|
| 581 |
def search_memory(question):
|
| 582 |
try:
|
| 583 |
-
|
|
|
|
| 584 |
if results.matches and results.matches[0].score >= SIMILARITY_THRESHOLD:
|
| 585 |
-
|
|
|
|
|
|
|
|
|
|
| 586 |
except: pass
|
| 587 |
-
return None
|
| 588 |
|
| 589 |
def store_memory(question, answer):
|
| 590 |
try:
|
| 591 |
-
pc_index.upsert(vectors=[{
|
| 592 |
-
"
|
| 593 |
-
"
|
| 594 |
-
|
|
|
|
|
|
|
| 595 |
except: pass
|
| 596 |
|
| 597 |
# ── Tools ─────────────────────────────────────────────
|
| 598 |
-
def tool_web_search(
|
| 599 |
try:
|
| 600 |
-
with DDGS() as
|
|
|
|
| 601 |
return "\n\n".join(f"[{r.get('title','')}]\n{r.get('body','')}" for r in results) or "No results."
|
| 602 |
-
except Exception as e: return f"
|
| 603 |
|
| 604 |
def tool_news(topic):
|
| 605 |
try:
|
| 606 |
-
with DDGS() as
|
| 607 |
-
|
|
|
|
| 608 |
except Exception as e: return f"News failed: {e}"
|
| 609 |
|
| 610 |
def tool_calculator(expr):
|
| 611 |
try:
|
| 612 |
-
allowed = {k: getattr(math,k) for k in dir(math) if not k.startswith("_")}
|
| 613 |
-
return f"Result: {eval(expr, {'__builtins__':{}}, allowed)}"
|
| 614 |
except Exception as e: return f"Error: {e}"
|
| 615 |
|
| 616 |
-
def tool_datetime():
|
|
|
|
| 617 |
|
| 618 |
def tool_weather(city):
|
| 619 |
-
try:
|
|
|
|
| 620 |
except Exception as e: return f"Weather failed: {e}"
|
| 621 |
|
| 622 |
-
def tool_currency(amount,
|
| 623 |
try:
|
| 624 |
-
data = requests.get(f"https://api.exchangerate-api.com/v4/latest/{
|
| 625 |
-
rate = data["rates"].get(
|
| 626 |
-
if not rate: return f"Rate not found"
|
| 627 |
-
return f"💱 {amount} {
|
| 628 |
except Exception as e: return f"Currency failed: {e}"
|
| 629 |
|
| 630 |
-
def
|
| 631 |
-
|
| 632 |
-
|
| 633 |
-
|
| 634 |
-
|
| 635 |
-
|
| 636 |
-
|
| 637 |
-
|
| 638 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 639 |
def decide_tool(question):
|
| 640 |
try:
|
| 641 |
from groq import Groq
|
| 642 |
-
|
| 643 |
-
prompt = f"""Pick ONE tool or none.
|
| 644 |
Tools: web_search(query), news(topic), calculator(expr), datetime(), weather(city),
|
| 645 |
-
currency(amount,from,to), unit_convert(value,from,to),
|
|
|
|
| 646 |
Question: "{question}"
|
| 647 |
Reply ONLY with tool call or 'none':"""
|
| 648 |
-
resp =
|
| 649 |
-
|
|
|
|
|
|
|
|
|
|
| 650 |
d = resp.choices[0].message.content.strip().lower()
|
| 651 |
-
if d.startswith("web_search("): return "🌐 Searching
|
| 652 |
-
if d.startswith("news("): return "📰 Getting
|
| 653 |
-
if d.startswith("calculator("): return "🧮 Calculating",
|
| 654 |
-
if d.startswith("datetime"): return "🕐 Checking time",
|
| 655 |
-
if d.startswith("weather("): return "🌤
|
| 656 |
if d.startswith("currency("):
|
| 657 |
p = d[9:].rstrip(")").split(",")
|
| 658 |
-
if len(p)==3: return "💱 Converting
|
| 659 |
if d.startswith("unit_convert("):
|
| 660 |
p = d[13:].rstrip(")").split(",")
|
| 661 |
-
if len(p)==3: return "📏 Converting
|
| 662 |
if d.startswith("save_note("):
|
| 663 |
p = d[10:].rstrip(")").split(",",1)
|
| 664 |
-
if len(p)==2: return "📝 Saving note", save_note(p[0].strip(),p[1].strip())
|
| 665 |
-
if d.startswith("get_note("): return "📝 Finding note",
|
| 666 |
except: pass
|
| 667 |
return None, None
|
| 668 |
|
|
|
|
| 669 |
def stream_response(question, context):
|
| 670 |
from groq import Groq
|
| 671 |
-
|
|
|
|
|
|
|
| 672 |
profile_ctx = get_profile_context()
|
| 673 |
system = PA_PERSONALITY
|
| 674 |
-
if profile_ctx:
|
| 675 |
-
|
| 676 |
-
|
| 677 |
-
|
| 678 |
-
|
| 679 |
-
|
| 680 |
-
|
| 681 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 682 |
for chunk in stream:
|
| 683 |
yield chunk.choices[0].delta.content or ""
|
| 684 |
|
| 685 |
-
# ──
|
|
|
|
| 686 |
if not st.session_state.messages:
|
| 687 |
-
welcome = f"Hi {your_name}! I'm **
|
| 688 |
-
st.session_state.messages.append({"role":"assistant","content":welcome})
|
| 689 |
-
|
| 690 |
-
# ── Suggestion chips (only on first load) ─────────────
|
| 691 |
-
if len(st.session_state.messages) == 1:
|
| 692 |
-
st.markdown("""
|
| 693 |
-
<div class="chip-row">
|
| 694 |
-
<div class="chip">🌤 Weather today</div>
|
| 695 |
-
<div class="chip">📰 Latest AI news</div>
|
| 696 |
-
<div class="chip">💱 100 USD to INR</div>
|
| 697 |
-
<div class="chip">📝 Save a note</div>
|
| 698 |
-
</div>
|
| 699 |
-
""", unsafe_allow_html=True)
|
| 700 |
-
|
| 701 |
-
# ── Render messages ───────────────────────────────────
|
| 702 |
for msg in st.session_state.messages:
|
| 703 |
with st.chat_message(msg["role"]):
|
| 704 |
st.markdown(msg["content"])
|
| 705 |
|
| 706 |
-
|
| 707 |
-
|
| 708 |
-
st.session_state.
|
| 709 |
-
st.session_state.history.append({"role":"user","content":prompt})
|
| 710 |
with st.chat_message("user"):
|
| 711 |
st.markdown(prompt)
|
| 712 |
|
| 713 |
with st.chat_message("assistant"):
|
| 714 |
-
|
| 715 |
if cached:
|
| 716 |
-
st.markdown(f'<div class="memory-pill">🧠 From memory · {score:.0%} match</div>', unsafe_allow_html=True)
|
| 717 |
st.markdown(cached)
|
| 718 |
-
st.session_state.messages.append({"role":"assistant","content":cached})
|
| 719 |
-
st.session_state.history.append({"role":"assistant","content":cached})
|
| 720 |
else:
|
| 721 |
-
with st.spinner(""):
|
| 722 |
tool_label, context = decide_tool(prompt)
|
| 723 |
if tool_label:
|
| 724 |
-
st.
|
| 725 |
response = st.write_stream(stream_response(prompt, context or ""))
|
| 726 |
store_memory(prompt, response)
|
| 727 |
-
|
| 728 |
-
st.session_state.messages.append({"role":"assistant","content":response})
|
| 729 |
-
st.session_state.history.append({"role":"assistant","content":response})
|
|
|
|
| 1 |
"""
|
| 2 |
+
Personal AI Assistant v3 — Smarter, More Personal, Human-like
|
| 3 |
+
==============================================================
|
| 4 |
+
- llama-3.3-70b (smarter model)
|
| 5 |
+
- Personal profile (learns about you)
|
| 6 |
+
- Full conversation history
|
| 7 |
+
- Custom PA name and personality
|
| 8 |
+
- Proactive suggestions
|
| 9 |
Run with: streamlit run app.py
|
| 10 |
"""
|
| 11 |
|
|
|
|
| 14 |
import numpy as np
|
| 15 |
from duckduckgo_search import DDGS
|
| 16 |
|
| 17 |
+
# ══════════════════════════════════════════════════════
|
| 18 |
+
# CONFIG
|
| 19 |
# ══════════════════════════════════════════════════════
|
| 20 |
LLM_MODEL = "llama-3.3-70b-versatile"
|
| 21 |
SIMILARITY_THRESHOLD = 0.78
|
|
|
|
| 24 |
PINECONE_INDEX_NAME = "pa-memory"
|
| 25 |
NOTES_FILE = "pa_notes.json"
|
| 26 |
PROFILE_FILE = "pa_profile.json"
|
| 27 |
+
PA_NAME = "Aria" # ← your PA's name
|
| 28 |
PA_PERSONALITY = (
|
| 29 |
"You are Aria, a brilliant and warm personal AI assistant. "
|
| 30 |
"You are proactive, thoughtful, and genuinely care about helping. "
|
| 31 |
"You remember things about the user and reference them naturally. "
|
| 32 |
+
"You occasionally make helpful suggestions the user didn't ask for. "
|
| 33 |
"You are concise but never cold. You feel like a real assistant, not a robot."
|
| 34 |
)
|
| 35 |
# ══════════════════════════════════════════════════════
|
| 36 |
|
| 37 |
+
st.set_page_config(page_title=f"{PA_NAME} — Personal AI", page_icon="✨", layout="centered")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 38 |
|
|
|
|
| 39 |
st.markdown("""
|
| 40 |
<style>
|
| 41 |
+
.stApp { background-color: #0a0a0f; color: #e2e8f0; }
|
| 42 |
+
.stChatMessage { background: #12121a; border: 1px solid #1e1e2e; border-radius: 12px; }
|
| 43 |
+
.stSidebar { background-color: #12121a; }
|
| 44 |
+
h1 { background: linear-gradient(135deg, #fff 0%, #7c3aed 60%, #06b6d4 100%);
|
| 45 |
+
-webkit-background-clip: text; -webkit-text-fill-color: transparent; }
|
| 46 |
+
.profile-box { background: rgba(124,58,237,0.08); border: 1px solid rgba(124,58,237,0.2);
|
| 47 |
+
border-radius: 10px; padding: 12px; margin: 8px 0; font-size: 13px; }
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
</style>
|
| 49 |
""", unsafe_allow_html=True)
|
| 50 |
|
| 51 |
+
st.title(f"✨ {PA_NAME}")
|
| 52 |
+
st.caption("Your Personal AI Assistant")
|
| 53 |
+
|
| 54 |
+
# ── Profile ───────────────────────────────────────────
|
| 55 |
+
def load_profile():
|
| 56 |
+
if os.path.exists(PROFILE_FILE):
|
| 57 |
+
return json.load(open(PROFILE_FILE))
|
| 58 |
+
return {"name": "", "about": "", "preferences": [], "facts": []}
|
| 59 |
+
|
| 60 |
+
def save_profile(profile):
|
| 61 |
+
json.dump(profile, open(PROFILE_FILE, "w"), indent=2)
|
| 62 |
+
|
| 63 |
+
def update_profile_from_chat(question, answer):
|
| 64 |
+
"""Extract and save personal facts mentioned in conversation."""
|
| 65 |
+
profile = load_profile()
|
| 66 |
+
keywords = ["i am", "i'm", "my name is", "i work", "i like", "i love",
|
| 67 |
+
"i hate", "i live", "i study", "i'm from", "my job", "my hobby"]
|
| 68 |
+
q_lower = question.lower()
|
| 69 |
+
if any(kw in q_lower for kw in keywords):
|
| 70 |
+
fact = question[:150]
|
| 71 |
+
if fact not in profile["facts"]:
|
| 72 |
+
profile["facts"].append(fact)
|
| 73 |
+
if len(profile["facts"]) > 20:
|
| 74 |
+
profile["facts"] = profile["facts"][-20:]
|
| 75 |
+
save_profile(profile)
|
| 76 |
|
| 77 |
+
def get_profile_context():
    """Build a plain-text summary of the stored profile for the system prompt."""
    data = load_profile()
    lines = []
    name = data.get("name")
    if name:
        lines.append(f"User's name: {name}")
    about = data.get("about")
    if about:
        lines.append(f"About user: {about}")
    facts = data.get("facts")
    if facts:
        # Only the 10 most recent facts, rendered as a bullet list.
        bullet_list = "\n".join(f"- {f}" for f in facts[-10:])
        lines.append("Things I know about user:\n" + bullet_list)
    if not lines:
        return ""
    return "\n".join(lines)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
|
| 85 |
# ── Sidebar ───────────────────────────────────────────
# Settings panel: API keys, user profile editor, tool list, and reset buttons.
with st.sidebar:
    st.header(f"✨ {PA_NAME} Settings")
    # Both keys are required; the guard below st.stop()s the app without them.
    groq_key = st.text_input("Groq API Key", type="password", placeholder="gsk_...")
    pinecone_key = st.text_input("Pinecone API Key", type="password", placeholder="xxxxxxxx...")
    your_name = st.text_input("Your Name", value="Sterlin")

    st.markdown("---")
    st.markdown("**👤 Your Profile**")
    profile = load_profile()
    about_you = st.text_area("Tell me about yourself", value=profile.get("about",""),
                             placeholder="e.g. I'm a developer from Chennai who loves AI...",
                             height=80)
    if st.button("💾 Save Profile"):
        profile["name"] = your_name
        profile["about"] = about_you
        save_profile(profile)
        st.success(f"Profile saved! {PA_NAME} now knows you better.")

    # Show the 5 most recently auto-captured facts, if any.
    if profile.get("facts"):
        st.markdown("**🧠 What I know about you:**")
        for fact in profile["facts"][-5:]:
            st.markdown(f'<div class="profile-box">• {fact}</div>', unsafe_allow_html=True)

    st.markdown("---")
    st.markdown("**🛠 Tools**")
    st.markdown("🌐 Web · 🧮 Math · 🌤 Weather")
    st.markdown("📰 News · 📧 Email · 💱 Currency")
    st.markdown("📏 Units · 📝 Notes")

    st.markdown("---")
    # "Clear Chat" resets only the in-session transcript/history.
    if st.button("🗑️ Clear Chat"):
        st.session_state.messages = []
        st.session_state.history = []
        st.rerun()
    # "Clear All Memory" also deletes the on-disk notes and profile files.
    # NOTE(review): this does not delete vectors from the Pinecone index —
    # confirm whether that is intentional.
    if st.button("🧹 Clear All Memory"):
        st.session_state.messages = []
        st.session_state.history = []
        if os.path.exists(NOTES_FILE): os.remove(NOTES_FILE)
        if os.path.exists(PROFILE_FILE): os.remove(PROFILE_FILE)
        st.success("All memory cleared!")
|
|
|
|
|
|
|
|
|
|
| 126 |
|
| 127 |
# Hard gate: nothing below can work without both API keys, so halt the
# script run here and prompt the user toward the sidebar inputs.
if not groq_key or not pinecone_key:
    st.info(f"👈 Enter your API keys in the sidebar to start chatting with {PA_NAME}.")
    st.stop()
|
| 130 |
|
| 131 |
# ── Load embedder ─────────────────────────────────────
|
|
|
|
| 136 |
|
| 137 |
# Cached embedding model handle (definition above); search_memory/store_memory
# call its .encode() to vectorise questions for the Pinecone index.
embedder = load_embedder()
|
| 138 |
|
| 139 |
+
# ── Init Pinecone ─────────────────────────────────────
@st.cache_resource
def init_pinecone(_key):
    """Return a handle to the configured Pinecone index, cached across reruns.

    The leading underscore on *_key* follows Streamlit's convention for
    excluding an argument from the cache key hash — presumably intentional
    here; verify if key rotation should invalidate the cache.
    """
    from pinecone import Pinecone
    client = Pinecone(api_key=_key)
    return client.Index(PINECONE_INDEX_NAME)
|
| 146 |
|
| 147 |
try:
|
| 148 |
pc_index = init_pinecone(pinecone_key)
|
|
|
|
| 154 |
# Per-session state: `messages` is the rendered transcript, `history` is the
# LLM-facing conversation log. Initialise both on the first run only.
if "messages" not in st.session_state: st.session_state.messages = []
if "history" not in st.session_state: st.session_state.history = []
|
| 156 |
|
| 157 |
+
# ── Notes ─────────────────────────────────────────────
|
| 158 |
def load_notes():
    """Return saved notes as a dict of {title: content} (empty if none)."""
    if not os.path.exists(NOTES_FILE):
        return {}
    try:
        # Context manager closes the handle; `json.load(open(...))` leaked it.
        with open(NOTES_FILE, encoding="utf-8") as f:
            return json.load(f)
    except (json.JSONDecodeError, OSError):
        # A corrupt notes file should not take down the whole chat app.
        return {}
|
| 160 |
|
| 161 |
def save_note(title, content):
    """Add or overwrite the note *title* and persist all notes to disk.

    Returns a user-facing confirmation string (shown in the chat UI).
    """
    notes = load_notes()
    notes[title] = content
    # Use a context manager so the handle is flushed and closed;
    # `json.dump(obj, open(path, "w"))` leaked the file object.
    with open(NOTES_FILE, "w", encoding="utf-8") as f:
        json.dump(notes, f, indent=2)
    return f"✅ Note saved: **{title}**"
|
| 166 |
|
| 167 |
def get_note(title):
    """Return the first saved note whose title contains *title* (case-insensitive)."""
    needle = title.lower()
    for note_title, body in load_notes().items():
        if needle in note_title.lower():
            return f"📝 **{note_title}**: {body}"
    return f"No note found for '{title}'"
|
| 173 |
|
| 174 |
+
# ── Memory ────────────────────────────────────────────
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 175 |
def search_memory(question):
    """Return a cached answer from Pinecone if a similar question was stored.

    Embeds *question*, queries the index for its nearest neighbour, and
    returns the stored answer when similarity clears SIMILARITY_THRESHOLD.
    Best-effort: any failure returns None so the app generates a fresh reply.
    """
    try:
        q_emb = embedder.encode(question).tolist()
        results = pc_index.query(vector=q_emb, top_k=1, include_metadata=True)
        if results.matches and results.matches[0].score >= SIMILARITY_THRESHOLD:
            score = results.matches[0].score
            answer = results.matches[0].metadata.get("answer", "")
            st.caption(f"🧠 Memory Hit — {score:.0%} confident")
            return answer
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; memory lookup stays deliberately best-effort.
        pass
    return None
|
| 186 |
|
| 187 |
def store_memory(question, answer):
    """Upsert a question/answer pair into the Pinecone index.

    Best-effort: a failed write is silently ignored so memory persistence
    can never crash the chat flow.
    """
    try:
        pc_index.upsert(vectors=[{
            "id": str(uuid.uuid4()),
            "values": embedder.encode(question).tolist(),
            "metadata": {"question": question, "answer": answer,
                         "timestamp": datetime.datetime.now().isoformat()},
        }])
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate.
        pass
|
| 196 |
|
| 197 |
# ── Tools ─────────────────────────────────────────────
|
| 198 |
+
def tool_web_search(query):
    """Run a DuckDuckGo text search and return formatted result snippets."""
    try:
        with DDGS() as ddgs:
            hits = list(ddgs.text(query, max_results=WEB_MAX_RESULTS))
        snippets = [f"[{hit.get('title','')}]\n{hit.get('body','')}" for hit in hits]
        joined = "\n\n".join(snippets)
        return joined if joined else "No results."
    except Exception as e:
        return f"Web search failed: {e}"
|
| 204 |
|
| 205 |
def tool_news(topic):
    """Fetch up to five recent DuckDuckGo news items about *topic*."""
    try:
        with DDGS() as ddgs:
            items = list(ddgs.news(topic, max_results=5))
        formatted = [f"📰 {item.get('title','')}\n{item.get('body','')}" for item in items]
        text = "\n\n".join(formatted)
        return text if text else "No news found."
    except Exception as e:
        return f"News failed: {e}"
|
| 211 |
|
| 212 |
def tool_calculator(expr):
    """Evaluate a math expression restricted to math-module names.

    SECURITY NOTE: eval() on user-controlled text is dangerous even with an
    empty __builtins__ — attribute traversal on literals (e.g. via
    ``().__class__``) can escape the sandbox — so any expression containing
    a dunder is rejected before evaluation.
    """
    try:
        if "__" in expr:
            return "Error: invalid expression"
        allowed = {k: getattr(math, k) for k in dir(math) if not k.startswith("_")}
        return f"Result: {eval(expr, {'__builtins__': {}}, allowed)}"
    except Exception as e:
        return f"Error: {e}"
|
| 217 |
|
| 218 |
+
def tool_datetime():
    """Return the current local date and time as a human-readable sentence."""
    now = datetime.datetime.now()
    return now.strftime("Today is %A, %B %d, %Y. Time: %I:%M %p")
|
| 220 |
|
| 221 |
def tool_weather(city):
    """Fetch a one-line weather summary for *city* from wttr.in."""
    try:
        resp = requests.get(f"https://wttr.in/{city}?format=3", timeout=5)
        return resp.text.strip()
    except Exception as e:
        return f"Weather failed: {e}"
|
| 225 |
|
| 226 |
+
def tool_currency(amount, from_cur, to_cur):
    """Convert *amount* between currencies via exchangerate-api.com."""
    try:
        src = from_cur.upper()
        dst = to_cur.upper()
        url = f"https://api.exchangerate-api.com/v4/latest/{src}"
        data = requests.get(url, timeout=5).json()
        rate = data["rates"].get(dst)
        if not rate:
            return f"Rate not found for {to_cur}"
        converted = float(amount) * rate
        return f"💱 {amount} {src} = **{converted:.2f} {dst}**"
    except Exception as e:
        return f"Currency failed: {e}"
|
| 233 |
|
| 234 |
+
def tool_unit_converter(value, from_unit, to_unit):
    """Convert *value* between a fixed set of unit pairs.

    Returns a formatted result string, or a "Can't convert" message for
    unsupported pairs.
    """
    factors = {
        ("km", "miles"): 0.621371,
        ("miles", "km"): 1.60934,
        ("kg", "lbs"): 2.20462,
        ("lbs", "kg"): 0.453592,
        ("cm", "inches"): 0.393701,
        ("inches", "cm"): 2.54,
        ("m", "ft"): 3.28084,
        ("ft", "m"): 0.3048,
        ("l", "gallons"): 0.264172,
        ("gallons", "l"): 3.78541,
    }
    pair = (from_unit.lower(), to_unit.lower())
    # Temperature is an affine mapping, not a pure scale factor, so it is
    # handled separately from the lookup table.
    if pair == ("c", "f"):
        return f"🌡 {value}°C = **{float(value)*9/5+32:.1f}°F**"
    if pair == ("f", "c"):
        return f"🌡 {value}°F = **{(float(value)-32)*5/9:.1f}°C**"
    scale = factors.get(pair)
    if scale:
        return f"📏 {value} {from_unit} = **{float(value)*scale:.3f} {to_unit}**"
    return f"Can't convert {from_unit} to {to_unit}"
|
| 248 |
+
|
| 249 |
+
# ── Tool Decision ─────────────────────────────────────
def _tool_arg(call, prefix):
    """Extract the single argument from a tool call string like ``prefix(arg)``."""
    return call[len(prefix):].rstrip(")").strip("'\"")

def decide_tool(question):
    """Ask a small, fast LLM to route *question* to one of the tools.

    Returns (status_label, tool_output) when a tool was chosen and executed,
    or (None, None) when no tool applies or routing failed. Routing is
    best-effort: any exception falls through to plain chat.
    """
    try:
        from groq import Groq
        client = Groq(api_key=groq_key)
        prompt = f"""Pick ONE tool or none for this question.
Tools: web_search(query), news(topic), calculator(expr), datetime(), weather(city),
currency(amount,from,to), unit_convert(value,from,to),
save_note(title,content), get_note(title), none
Question: "{question}"
Reply ONLY with tool call or 'none':"""
        resp = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=[{"role": "user", "content": prompt}],
            max_tokens=60, temperature=0,
        )
        # NOTE: lowercasing simplifies prefix matching but also lowercases
        # argument text (e.g. saved note titles).
        d = resp.choices[0].message.content.strip().lower()
        if d.startswith("web_search("):
            return "🌐 Searching...", tool_web_search(_tool_arg(d, "web_search("))
        if d.startswith("news("):
            return "📰 Getting news...", tool_news(_tool_arg(d, "news("))
        if d.startswith("calculator("):
            return "🧮 Calculating...", tool_calculator(_tool_arg(d, "calculator("))
        if d.startswith("datetime"):
            return "🕐 Checking time...", tool_datetime()
        if d.startswith("weather("):
            return "🌤 Weather...", tool_weather(_tool_arg(d, "weather("))
        if d.startswith("currency("):
            p = d[9:].rstrip(")").split(",")
            if len(p) == 3:
                # Strip stray quotes the model sometimes wraps around args;
                # the single-arg paths already did this but these did not.
                amount, src, dst = (x.strip().strip("'\"") for x in p)
                return "💱 Converting...", tool_currency(amount, src, dst)
        if d.startswith("unit_convert("):
            p = d[13:].rstrip(")").split(",")
            if len(p) == 3:
                val, from_u, to_u = (x.strip().strip("'\"") for x in p)
                return "📏 Converting...", tool_unit_converter(val, from_u, to_u)
        if d.startswith("save_note("):
            p = d[10:].rstrip(")").split(",", 1)
            if len(p) == 2:
                title, content = (x.strip().strip("'\"") for x in p)
                return "📝 Saving note...", save_note(title, content)
        if d.startswith("get_note("):
            return "📝 Finding note...", get_note(_tool_arg(d, "get_note("))
    except Exception:
        # Narrowed from a bare `except:`; routing failures degrade to chat.
        pass
    return None, None
|
| 283 |
|
| 284 |
+
# ── Streaming ─────────────────────────────────────────
def stream_response(question, context):
    """Yield the assistant's reply chunk-by-chunk from the Groq API.

    *context* is optional tool output; when present it is prepended to the
    user turn so the model can ground its answer in it.
    """
    from groq import Groq
    client = Groq(api_key=groq_key)

    # System prompt: base personality plus everything known about the user.
    system_parts = [PA_PERSONALITY]
    profile_ctx = get_profile_context()
    if profile_ctx:
        system_parts.append(f"What you know about the user:\n{profile_ctx}")
    if your_name:
        system_parts.append(f"The user's name is {your_name}. Address them by name occasionally.")
    system = "\n\n".join(system_parts)

    # Last 6 turns of history give short-term conversational context.
    messages = [{"role": "system", "content": system}]
    messages.extend(st.session_state.history[-6:])
    if context:
        user_content = f"Context from tools:\n{context}\n\n{question}"
    else:
        user_content = question
    messages.append({"role": "user", "content": user_content})

    stream = client.chat.completions.create(
        model=LLM_MODEL,
        messages=messages,
        stream=True, max_tokens=1024, temperature=0.7,
    )
    for chunk in stream:
        yield chunk.choices[0].delta.content or ""
|
| 313 |
|
| 314 |
+
# ── Chat UI ───────────────────────────────────────────
# Welcome message on first load
# Seed the transcript with a greeting so the first render isn't empty.
if not st.session_state.messages:
    welcome = f"Hi {your_name}! 👋 I'm **{PA_NAME}**, your personal AI assistant. I can search the web, check weather, convert currencies, save notes, and much more. What can I help you with today?"
    st.session_state.messages.append({"role": "assistant", "content": welcome})
|
| 319 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 320 |
# Replay the full transcript on every Streamlit rerun.
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])
|
| 323 |
|
| 324 |
+
# Main chat turn: record the user message, try the semantic cache, otherwise
# route through tools and stream a fresh LLM reply.
if prompt := st.chat_input(f"Talk to {PA_NAME}..."):
    # `messages` drives the rendered transcript; `history` feeds the LLM.
    st.session_state.messages.append({"role": "user", "content": prompt})
    st.session_state.history.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)

    with st.chat_message("assistant"):
        # Semantic-cache lookup first: reuse a stored answer when a similar
        # question clears the similarity threshold.
        cached = search_memory(prompt)
        if cached:
            st.markdown(cached)
            st.session_state.messages.append({"role": "assistant", "content": cached})
            st.session_state.history.append({"role": "assistant", "content": cached})
        else:
            # Tool routing happens under the spinner; streaming then renders live.
            with st.spinner(f"{PA_NAME} is thinking..."):
                tool_label, context = decide_tool(prompt)
            if tool_label:
                st.caption(tool_label)
            response = st.write_stream(stream_response(prompt, context or ""))
            # Persist the turn: semantic memory, auto-captured profile facts,
            # and both session-state logs.
            store_memory(prompt, response)
            update_profile_from_chat(prompt, response)
            st.session_state.messages.append({"role": "assistant", "content": response})
            st.session_state.history.append({"role": "assistant", "content": response})
|