Shubham170793 committed on
Commit
b8ced0e
·
verified ·
1 Parent(s): c8ee8ff

Update src/streamlit_app.py

Browse files
Files changed (1) hide show
  1. src/streamlit_app.py +262 -174
src/streamlit_app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import os
2
  import re
3
  import shutil
@@ -5,34 +6,49 @@ import streamlit as st
5
  import torch
6
 
7
  # ==========================================================
8
- # ✅ Environment Setup
9
  # ==========================================================
10
- st.set_page_config(page_title="Enterprise Knowledge Assistant", layout="wide")
11
  print("CUDA available:", torch.cuda.is_available())
 
 
 
 
 
 
 
 
 
 
 
 
12
 
13
  # ==========================================================
14
- # ⚙️ Hugging Face Cache Setup
15
  # ==========================================================
16
  CACHE_DIR = "/tmp/hf_cache"
17
  os.makedirs(CACHE_DIR, exist_ok=True)
18
- os.environ.update({
19
- "HF_HOME": CACHE_DIR,
20
- "TRANSFORMERS_CACHE": CACHE_DIR,
21
- "HF_DATASETS_CACHE": CACHE_DIR,
22
- "HF_MODULES_CACHE": CACHE_DIR
23
- })
 
 
24
 
25
  # ==========================================================
26
- # 📦 Imports
27
  # ==========================================================
 
28
  from ingestion import extract_text_from_pdf, chunk_text
29
  from vectorstore import build_faiss_index
30
  from qa import retrieve_chunks, generate_answer, cache_embeddings, embed_chunks, genai_generate
31
 
32
  # ==========================================================
33
- # 🧠 Suggestion Generator
34
  # ==========================================================
35
  def generate_dynamic_suggestions_from_toc(toc, chunks, doc_name="Document"):
 
36
  if not toc or not chunks:
37
  return []
38
  titles = []
@@ -44,16 +60,17 @@ def generate_dynamic_suggestions_from_toc(toc, chunks, doc_name="Document"):
44
 
45
  context_sample = " ".join(chunks[:3])[:4000]
46
  prompt = f"""
47
- You are generating concise, document-specific questions for "{doc_name}".
48
- Use this TOC and sample text as context.
49
 
50
- TOC:
51
  {chr(10).join(['- ' + t for t in titles[:8]])}
52
 
53
  TEXT SAMPLE:
54
  {context_sample}
55
 
56
- Generate 5–7 short, document-based questions under 18 words.
 
57
  """
58
 
59
  try:
@@ -67,187 +84,258 @@ def generate_dynamic_suggestions_from_toc(toc, chunks, doc_name="Document"):
67
  final.append(q)
68
  return final[:7]
69
  except Exception:
70
- return ["What is this document about?", "How do I use this guide?"]
71
-
72
- # ==========================================================
73
- # 🎨 Styling
74
- # ==========================================================
75
- st.markdown("""
76
- <style>
77
- div.block-container { padding-top: 1rem; max-width: 1100px; }
78
- h2, h3, h4 { color: #f3f4f6; font-weight: 600; }
79
- h3 { font-size: 1.1rem; margin-bottom: 0.4rem; }
80
- .status-line {
81
- background: #0f172a;
82
- border-left: 4px solid #10b981;
83
- border-radius: 6px;
84
- padding: 8px 14px;
85
- color: #d1fae5;
86
- margin-bottom: 0.6rem;
87
- }
88
- .suggest-chip {
89
- background-color: #111827;
90
- border: 1px solid #272b33;
91
- border-radius: 14px;
92
- color: #cbd5e1;
93
- padding: 5px 10px;
94
- cursor: pointer;
95
- font-size: 12.5px;
96
- transition: all 0.2s ease-in-out;
97
- margin: 3px 3px 3px 0;
98
- display: inline-block;
99
- }
100
- .suggest-chip:hover {
101
- background-color: #2563eb;
102
- border-color: #3b82f6;
103
- color: white;
104
- box-shadow: 0 0 6px rgba(59,130,246,0.4);
105
- }
106
- .stTextInput > div > div > input {
107
- background-color: #0f172a;
108
- color: #f1f5f9;
109
- border-radius: 6px;
110
- border: 1px solid #334155;
111
- padding: 8px 12px;
112
- font-size: 14px;
113
- }
114
- .answer-box {
115
- background: linear-gradient(135deg, #0f172a, #1e293b);
116
- border-left: 4px solid #3b82f6;
117
- border-radius: 8px;
118
- padding: 16px 18px;
119
- color: #f1f5f9;
120
- margin-top: 0.5rem;
121
- }
122
- section.ask-block { margin-top: 0.5rem; margin-bottom: 0.2rem; }
123
- section.answer-block { margin-top: 0.2rem; }
124
- </style>
125
- """, unsafe_allow_html=True)
126
-
127
- # ==========================================================
128
- # 🧭 Sidebar (simplified)
129
  # ==========================================================
130
- with st.sidebar:
131
- st.markdown("### 🧠 Response Mode")
132
- mode = st.radio("", ["Strict (Document-only)", "Extended (Document + general)"], index=0)
133
- reasoning_mode = mode.startswith("Extended")
134
- st.caption("Strict = answers only from the document. Extended = may include helpful general info.")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
135
 
136
- with st.expander("βš™οΈ Advanced Settings (for developers)", expanded=False):
137
- chunk_size = st.slider("Chunk Size", 200, 1500, 1000, step=50)
138
- overlap = st.slider("Chunk Overlap", 50, 200, 120, step=10)
139
- top_k = st.slider("Top K Results", 1, 10, 5)
 
 
 
 
140
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
141
  st.markdown("---")
142
  st.caption("✨ Built by Shubham Sharma")
143
 
144
  # ==========================================================
145
- # πŸ“„ State Initialization
146
  # ==========================================================
147
- for key, default in {
148
- "user_query_input": "",
149
- "selected_suggestion": None,
150
- "show_more": False,
151
- "last_doc": None,
152
- "query_suggestions": [],
153
- }.items():
154
- if key not in st.session_state:
155
- st.session_state[key] = default
 
 
 
 
 
 
156
 
157
- def select_suggestion(q):
158
- st.session_state.user_query_input = q
159
- st.session_state.selected_suggestion = q
160
 
161
  # ==========================================================
162
- # πŸ“˜ Main Layout
163
  # ==========================================================
164
- st.title("Enterprise Knowledge Assistant")
165
- st.caption("Query SAP documentation and enterprise PDFs β€” powered by reasoning and retrieval.")
166
 
167
- doc_choice = st.radio("Select a document:", ["-- Select --", "Sample PDF", "Upload Custom PDF"], index=0)
168
- text, chunks, index, embeddings, toc = None, None, None, None, None
 
 
 
 
169
 
 
170
  if doc_choice == "-- Select --":
171
- st.info("⬅️ Select a document to begin.")
172
  else:
173
  if doc_choice == "Sample PDF":
174
  temp_path = os.path.join(os.path.dirname(__file__), "sample.pdf")
175
- st.markdown("<div class='status-line'>πŸ“˜ Using built-in Sample PDF β€” ready to query below.</div>", unsafe_allow_html=True)
176
  else:
177
  uploaded_file = st.file_uploader("πŸ“‚ Upload your PDF", type="pdf")
178
  if uploaded_file:
179
  temp_path = os.path.join("/tmp", uploaded_file.name)
180
  with open(temp_path, "wb") as f:
181
  f.write(uploaded_file.getbuffer())
182
- st.markdown(f"<div class='status-line'>βœ… '{uploaded_file.name}' uploaded successfully β€” ready to query below.</div>", unsafe_allow_html=True)
183
- else:
184
- temp_path = None
185
-
186
- if temp_path:
187
- with st.spinner("πŸ” Processing your document..."):
188
- text, toc = extract_text_from_pdf(temp_path)
189
- chunks = chunk_text(text, chunk_size=chunk_size)
190
-
191
- # βœ… Only generate suggestions once per document
192
- if st.session_state.get("last_doc") != os.path.basename(temp_path):
193
- st.session_state["query_suggestions"] = generate_dynamic_suggestions_from_toc(
194
- toc, chunks, os.path.basename(temp_path)
195
- )
196
- st.session_state["last_doc"] = os.path.basename(temp_path)
197
-
198
- query_suggestions = st.session_state["query_suggestions"]
199
-
200
- with st.spinner("βš™οΈ Building FAISS index..."):
201
- embeddings = cache_embeddings(os.path.basename(temp_path), chunks, embed_chunks)
202
- index = build_faiss_index(embeddings)
203
-
204
- # ----------------------------------------------------------
205
- # πŸ’¬ Ask a Question
206
- # ----------------------------------------------------------
207
- st.markdown("<section class='ask-block'>", unsafe_allow_html=True)
208
- st.subheader("Ask the Assistant")
209
-
210
- if query_suggestions:
211
- visible = query_suggestions if st.session_state.show_more else query_suggestions[:3]
212
- cols = st.columns(min(3, len(visible)))
213
- for i, q in enumerate(visible):
214
- if cols[i % 3].button(f"πŸ” {q}", key=f"sugg_{i}", on_click=select_suggestion, args=(q,)):
215
- pass
216
-
217
- toggle_text = "Show all sample questions β–²" if st.session_state.show_more else "Show all sample questions β–Ό"
218
- if st.button(toggle_text, key="toggle_sugg"):
219
- st.session_state.show_more = not st.session_state.show_more
220
- st.experimental_rerun()
221
-
222
- user_query = st.text_input("Type your question or click one above:", key="user_query_input")
223
- st.markdown("</section>", unsafe_allow_html=True)
224
-
225
- # ----------------------------------------------------------
226
- # πŸ€– Assistant
227
- # ----------------------------------------------------------
228
- if user_query.strip():
229
- with st.spinner("πŸ’­ Generating response..."):
230
- retrieved = retrieve_chunks(user_query, index, chunks, top_k=top_k, embeddings=embeddings)
231
- answer = generate_answer(user_query, retrieved, reasoning_mode=reasoning_mode)
232
-
233
- st.markdown("<section class='answer-block'>", unsafe_allow_html=True)
234
- st.subheader("Assistant")
235
- st.markdown(f"<div class='answer-box'>πŸ’‘ {answer}</div>", unsafe_allow_html=True)
236
- st.markdown("</section>", unsafe_allow_html=True)
237
-
238
- with st.expander("🧩 See Source Passages"):
239
- for i, r in enumerate(retrieved, start=1):
240
- st.markdown(f"**Chunk {i}:** {r}")
241
-
242
- # ----------------------------------------------------------
243
- # πŸ“š Explore Document
244
- # ----------------------------------------------------------
245
- with st.expander("πŸ“– View Original Document Content"):
246
- if toc:
247
- st.markdown("**Table of Contents**")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
248
  toc_text = "\n".join([f"{sec}. {title}" for sec, title in toc])
249
- st.text_area("", toc_text, height=150)
250
- if chunks:
251
- st.markdown("**Extracted Text Preview**")
252
- st.text_area("", text[:1000], height=150)
253
- st.caption(f"{len(chunks)} chunks processed.")
 
1
+ # streamlit_app.py
2
  import os
3
  import re
4
  import shutil
 
6
  import torch
7
 
8
  # ==========================================================
9
+ # ✅ Environment Diagnostics
10
  # ==========================================================
 
11
  print("CUDA available:", torch.cuda.is_available())
12
+ if torch.cuda.is_available():
13
+ try:
14
+ print("GPU:", torch.cuda.get_device_name(0))
15
+ except Exception:
16
+ pass
17
+ else:
18
+ print("Running on CPU")
19
+
20
+ # ==========================================================
21
+ # ✅ Page Configuration
22
+ # ==========================================================
23
+ st.set_page_config(page_title="Enterprise Knowledge Assistant", layout="wide")
24
 
25
  # ==========================================================
26
+ # ⚙️ Hugging Face Cache Configuration (non-destructive)
27
  # ==========================================================
28
  CACHE_DIR = "/tmp/hf_cache"
29
  os.makedirs(CACHE_DIR, exist_ok=True)
30
+ os.environ.update(
31
+ {
32
+ "HF_HOME": CACHE_DIR,
33
+ "TRANSFORMERS_CACHE": CACHE_DIR,
34
+ "HF_DATASETS_CACHE": CACHE_DIR,
35
+ "HF_MODULES_CACHE": CACHE_DIR,
36
+ }
37
+ )
38
 
39
  # ==========================================================
40
+ # 📦 Imports AFTER environment setup (your modules)
41
  # ==========================================================
42
+ # These should be your existing modules β€” unchanged
43
  from ingestion import extract_text_from_pdf, chunk_text
44
  from vectorstore import build_faiss_index
45
  from qa import retrieve_chunks, generate_answer, cache_embeddings, embed_chunks, genai_generate
46
 
47
  # ==========================================================
48
+ # 🧠 Smart suggestion generator (TOC-based; unchanged semantics)
49
  # ==========================================================
50
  def generate_dynamic_suggestions_from_toc(toc, chunks, doc_name="Document"):
51
+ """Generate short, doc-focused suggestion questions from a TOC"""
52
  if not toc or not chunks:
53
  return []
54
  titles = []
 
60
 
61
  context_sample = " ".join(chunks[:3])[:4000]
62
  prompt = f"""
63
+ You are generating concise, context-aware questions based on the document "{doc_name}".
64
+ Use this Table of Contents and sample content for inspiration.
65
 
66
+ TABLE OF CONTENTS:
67
  {chr(10).join(['- ' + t for t in titles[:8]])}
68
 
69
  TEXT SAMPLE:
70
  {context_sample}
71
 
72
+ Generate 5–7 questions that are short, relevant, and strictly document-based.
73
+ Each question should be under 18 words.
74
  """
75
 
76
  try:
 
84
  final.append(q)
85
  return final[:7]
86
  except Exception:
87
+ # safe fallback (minimal, doc-driven)
88
+ return ["What is this document about?", "How do I start using this process?"]
89
+
90
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
91
  # ==========================================================
92
+ # 🎨 Global CSS / UI polish (no functional changes)
93
+ # ==========================================================
94
+ st.markdown(
95
+ """
96
+ <style>
97
+ /* container width and heading style */
98
+ div.block-container { padding-top:1.2rem; max-width:1100px; }
99
+ h1 { font-weight:700; color: #f8fafc; }
100
+ h2, h3 { color:#f1f5f9 }
101
+
102
+ /* Upload / success card */
103
+ .upload-card {
104
+ background: linear-gradient(90deg,#0f1724,#0b1220);
105
+ border-radius:10px;
106
+ padding:12px 16px;
107
+ color:#e6eef8;
108
+ border:1px solid rgba(59,130,246,0.06);
109
+ margin-top:10px;
110
+ }
111
+
112
+ /* Suggestion chips */
113
+ .suggest-chip {
114
+ background-color: #0f1724;
115
+ border: 1px solid #374151;
116
+ border-radius: 14px;
117
+ color: #e6eef8;
118
+ padding: 8px 12px;
119
+ cursor: pointer;
120
+ font-size: 13px;
121
+ margin:6px 6px 10px 0;
122
+ display:inline-block;
123
+ transition: all 0.15s ease-in-out;
124
+ max-width: 360px;
125
+ text-align:left;
126
+ }
127
+ .suggest-chip:hover { transform: translateY(-2px); box-shadow: 0 6px 18px rgba(15,23,42,0.35); }
128
+ .suggest-chip.selected {
129
+ border-color: #3b82f6;
130
+ background: linear-gradient(90deg,#13325a,#0f2a4d);
131
+ color: #eaf2ff;
132
+ box-shadow: 0 8px 20px rgba(59,130,246,0.12);
133
+ }
134
 
135
+ /* Input styling */
136
+ .stTextInput > div > div > input {
137
+ background-color:#0b1220 !important;
138
+ color:#e6eef8 !important;
139
+ border-radius:6px !important;
140
+ border:1px solid #273244 !important;
141
+ padding:10px !important;
142
+ }
143
 
144
+ /* Answer box */
145
+ .answer-box {
146
+ background: linear-gradient(180deg,#0b1220,#071027);
147
+ border-left: 4px solid #3b82f6;
148
+ border-radius: 8px;
149
+ padding: 14px 16px;
150
+ color: #e6eef8;
151
+ margin-top: 12px;
152
+ box-shadow: 0 8px 30px rgba(2,6,23,0.6);
153
+ }
154
+ .answer-header {
155
+ font-weight:600; font-size:0.95rem; margin-bottom:8px; color:#dbeafe;
156
+ }
157
+
158
+ /* mini loader dots used in place of plain text spinner */
159
+ .dot-loader span { display:inline-block; width:6px; height:6px; margin:0 3px; background:#94a3b8; border-radius:50%; animation: dot 1s infinite linear; }
160
+ .dot-loader span:nth-child(2){ animation-delay:0.12s }
161
+ .dot-loader span:nth-child(3){ animation-delay:0.24s }
162
+ @keyframes dot {
163
+ 0% { transform: translateY(0); opacity:0.3 }
164
+ 50% { transform: translateY(-6px); opacity:1 }
165
+ 100% { transform: translateY(0); opacity:0.3 }
166
+ }
167
+
168
+ /* subtle expander header styling */
169
+ .streamlit-expanderHeader { color:#e6eef8 !important; }
170
+ </style>
171
+ """,
172
+ unsafe_allow_html=True,
173
+ )
174
+
175
+ # ==========================================================
176
+ # 🧭 Sidebar β€” keep concise settings for consumers
177
+ # ==========================================================
178
+ with st.sidebar:
179
+ st.markdown("### 🧭 Response Mode")
180
+ # default to Strict (document-only) for first-time users
181
+ mode = st.radio(
182
+ "",
183
+ ("Strict (Document-only)", "Extended (Document + general)"),
184
+ index=0,
185
+ help="Strict = answers only from the document. Extended = may include helpful general info.",
186
+ )
187
+
188
+ st.markdown("---")
189
+ if st.checkbox("Show advanced settings (for devs)", value=False):
190
+ st.markdown("### Developer Settings")
191
+ # keep internal knobs but hide by default
192
+ _chunk_size = st.slider("Chunk Size (chars)", 200, 1500, 1000, step=50)
193
+ _overlap = st.slider("Chunk overlap (chars)", 50, 300, 120, step=10)
194
+ _topk = st.slider("Top K Results", 1, 10, 5)
195
+ else:
196
+ # expose simple slider for users (but not all dev knobs)
197
+ _chunk_size = st.slider("Chunk Size", 200, 1500, 1000, step=50)
198
+ _overlap = 120
199
+ _topk = st.slider("Top K Results", 1, 10, 5)
200
  st.markdown("---")
201
  st.caption("✨ Built by Shubham Sharma")
202
 
203
  # ==========================================================
204
+ # πŸ—‚ Initialize session state keys (prevent widget warnings)
205
  # ==========================================================
206
+ if "user_query_input" not in st.session_state:
207
+ st.session_state["user_query_input"] = ""
208
+ if "show_more" not in st.session_state:
209
+ st.session_state["show_more"] = False
210
+ if "selected_suggestion" not in st.session_state:
211
+ st.session_state["selected_suggestion"] = None
212
+ if "last_doc_basename" not in st.session_state:
213
+ st.session_state["last_doc_basename"] = None
214
+
215
+ # Helper: set query when a suggestion is clicked (keeps things simple)
216
+ def handle_suggestion_click(q: str, idx: int):
217
+ st.session_state["user_query_input"] = q
218
+ st.session_state["selected_suggestion"] = idx
219
+ # Immediately rerun so user sees query filled and answer generated
220
+ st.experimental_rerun()
221
 
 
 
 
222
 
223
  # ==========================================================
224
+ # πŸ“„ Main application flow
225
  # ==========================================================
226
+ st.title("πŸ“„ Enterprise Knowledge Assistant")
227
+ st.caption("Query SAP documentation and enterprise PDFs β€” powered by reasoning + retrieval.")
228
 
229
+ # Document select/upload
230
+ doc_choice = st.radio(
231
+ "Select a document:",
232
+ ("-- Select --", "Sample PDF", "Upload Custom PDF"),
233
+ index=0,
234
+ )
235
 
236
+ temp_path = None
237
  if doc_choice == "-- Select --":
238
+ st.info("⬅️ Please select a document from above to begin.")
239
  else:
240
  if doc_choice == "Sample PDF":
241
  temp_path = os.path.join(os.path.dirname(__file__), "sample.pdf")
 
242
  else:
243
  uploaded_file = st.file_uploader("πŸ“‚ Upload your PDF", type="pdf")
244
  if uploaded_file:
245
  temp_path = os.path.join("/tmp", uploaded_file.name)
246
  with open(temp_path, "wb") as f:
247
  f.write(uploaded_file.getbuffer())
248
+ # success card
249
+ st.markdown(
250
+ f"<div class='upload-card'>βœ… <b>{uploaded_file.name}</b> uploaded successfully β€” ready to query below.</div>",
251
+ unsafe_allow_html=True,
252
+ )
253
+
254
+ # If sample chosen, show small card to confirm (non-intrusive)
255
+ if doc_choice == "Sample PDF" and temp_path:
256
+ st.markdown(
257
+ "<div class='upload-card'>πŸ“˜ Using built-in Sample PDF.</div>", unsafe_allow_html=True
258
+ )
259
+
260
+ # Only proceed when we have a path
261
+ if temp_path:
262
+ # Process & index (unchanged)
263
+ with st.spinner("πŸ” Processing document..."):
264
+ text, toc = extract_text_from_pdf(temp_path)
265
+ chunks = chunk_text(text, chunk_size=_chunk_size if " _chunk_size" in locals() else 1000)
266
+
267
+ # Prepare embeddings & index (use caching as before)
268
+ with st.spinner("βš™οΈ Preparing search index..."):
269
+ embeddings = cache_embeddings(os.path.basename(temp_path), chunks, embed_chunks)
270
+ index = build_faiss_index(embeddings)
271
+
272
+ # Only generate suggestions once per uploaded document during a session
273
+ basename = os.path.basename(temp_path)
274
+ if st.session_state.get("last_doc_basename") != basename:
275
+ # generate suggestions
276
+ query_suggestions = generate_dynamic_suggestions_from_toc(toc, chunks, basename)
277
+ st.session_state["query_suggestions_fixed"] = query_suggestions
278
+ st.session_state["last_doc_basename"] = basename
279
+ st.session_state["selected_suggestion"] = None
280
+ else:
281
+ query_suggestions = st.session_state.get("query_suggestions_fixed", [])
282
+
283
+ # ----------------------------------------------------------
284
+ # πŸ’¬ Ask a Question UI
285
+ # ----------------------------------------------------------
286
+ st.markdown("### Ask the Assistant")
287
+
288
+ # Show suggestions as styled chips β€” fixed for the doc during session
289
+ if query_suggestions:
290
+ visible = query_suggestions if st.session_state["show_more"] else query_suggestions[:3]
291
+ # render chips inline in a single column
292
+ chip_container = st.container()
293
+ for i, q in enumerate(visible):
294
+ # create a simple html chip and a real button for the click behaviour
295
+ selected = st.session_state.get("selected_suggestion") == i
296
+ chip_class = "suggest-chip selected" if selected else "suggest-chip"
297
+ # create horizontal layout: use columns to space them evenly
298
+ cols = chip_container.columns(3)
299
+ col = cols[i % 3]
300
+ # button triggers state change (use the same label so users can also press the button)
301
+ if col.button(f"πŸ” {q}", key=f"sugg_btn_{i}"):
302
+ handle_suggestion_click(q, i)
303
+ # render chip visually (non-interactive decoration)
304
+ col.markdown(f"<div class='{chip_class}'>{q}</div>", unsafe_allow_html=True)
305
+
306
+ # Show more / less toggle
307
+ toggle_text = "Show less β–²" if st.session_state["show_more"] else "Show more β–Ό"
308
+ if st.button(toggle_text, key="toggle_more_suggestions"):
309
+ st.session_state["show_more"] = not st.session_state["show_more"]
310
+ st.experimental_rerun()
311
+
312
+ # Type input β€” rely exclusively on session_state key (pre-initialized above) to avoid warnings
313
+ user_query = st.text_input("Type your question or click one above:", key="user_query_input")
314
+
315
+ # When a query is filled (either typed or from click), generate answer
316
+ if user_query and user_query.strip():
317
+ # show a friendly loader animation while generating (non-invasive)
318
+ st.markdown("<div class='dot-loader'><span></span><span></span><span></span></div>", unsafe_allow_html=True)
319
+ # retrieval + answer generation (same semantics as before)
320
+ retrieved = retrieve_chunks(user_query, index, chunks, top_k=_topk if "_topk" in locals() else _topk)
321
+ answer = generate_answer(user_query, retrieved, reasoning_mode=(mode.startswith("Extended")))
322
+ # answer card with header (UI only)
323
+ st.markdown("### Assistant")
324
+ st.markdown(
325
+ f"<div class='answer-box'><div class='answer-header'>Assistant’s Response</div>{answer}</div>",
326
+ unsafe_allow_html=True,
327
+ )
328
+
329
+ # supporting context expander (keeps collapsed by default)
330
+ with st.expander("πŸ“„ Supporting Context (source passages)"):
331
+ for i, r in enumerate(retrieved, start=1):
332
+ st.markdown(f"**Chunk {i}:** {r}")
333
+
334
+ # Optional helpful blocks (collapsed by default)
335
+ if toc:
336
+ with st.expander("πŸ“š Table of Contents"):
337
  toc_text = "\n".join([f"{sec}. {title}" for sec, title in toc])
338
+ st.text_area("", toc_text, height=140)
339
+ with st.expander("πŸ“„ Document Preview"):
340
+ st.text_area("", text[:1000], height=140)
341
+ st.caption(f"{len(chunks)} chunks processed.")