ChatBotsTA commited on
Commit
6401b0a
·
verified ·
1 Parent(s): f9762c3

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +140 -136
app.py CHANGED
@@ -16,13 +16,15 @@ except Exception:
16
  HAS_PYTTSX3 = False
17
 
18
  # ============ CONFIG ============
19
- OPENROUTER_KEY = os.getenv("OPENROUTER_API_KEY")
 
20
  OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "gpt-4o-mini")
21
- ELEVEN_API_KEY = os.getenv("ELEVEN_API_KEY")
22
- HUGGINGFACE_KEY = os.getenv("HUGGINGFACE_API_KEY")
23
  HF_MERMAID_MODEL = os.getenv("HF_MERMAID_MODEL", "TroyDoesAI/MermaidStable3B")
24
 
25
  # ============ HELPERS ============
 
26
def clean_text(text: str) -> str:
    """Collapse every run of whitespace in *text* to a single space and trim.

    A falsy value (``None``, ``""``) is treated as the empty string, so the
    helper is safe to call on optional fields.
    """
    return re.sub(r"\s+", " ", text or "").strip()
28
 
@@ -58,11 +60,9 @@ def openrouter_chat(messages: List[dict], model: str = OPENROUTER_MODEL, max_tok
58
  resp = requests.post(url, json=payload, headers=headers, timeout=30)
59
  resp.raise_for_status()
60
  data = resp.json()
61
- # robust parsing
62
  choices = data.get("choices", [])
63
  if choices:
64
  c = choices[0]
65
- # handle variations
66
  if "message" in c and isinstance(c["message"], dict):
67
  content = c["message"].get("content")
68
  if isinstance(content, dict) and "content" in content:
@@ -71,7 +71,6 @@ def openrouter_chat(messages: List[dict], model: str = OPENROUTER_MODEL, max_tok
71
  return True, content
72
  if "text" in c:
73
  return True, c["text"]
74
- # fallback: try top-level 'text' or 'output'
75
  if "text" in data:
76
  return True, data["text"]
77
  return False, "OpenRouter responded with unexpected shape"
@@ -80,17 +79,11 @@ def openrouter_chat(messages: List[dict], model: str = OPENROUTER_MODEL, max_tok
80
 
81
  # ============ Local extractive summarizer (offline) ============
82
def extractive_summary(text: str, num_sentences: int = 6) -> str:
    """Simple frequency-based extractive summarizer (fully offline fallback).

    Each sentence is scored by the summed corpus frequency of its words
    (stopwords and one-letter tokens excluded); the top ``num_sentences``
    sentences are restored to approximate document order and returned as
    ``- `` bullet lines.

    Returns "" for empty input and an explanatory message when the text is
    too short to build a frequency table.
    """
    if not text:
        return ""
    # Naive sentence split on terminal punctuation followed by whitespace.
    sentences = re.split(r'(?<=[.!?])\s+', text)
    words = re.findall(r'\w+', text.lower())
    # Minimal stopword list; expand as needed.
    stopwords = {
        "the", "and", "is", "in", "to", "of", "a", "that", "it", "for",
        "on", "with", "as", "are", "was", "be", "by", "an", "or",
    }
    freq = {}
    for w in words:
        if w in stopwords or len(w) < 2:
            continue  # skip noise words so they don't dominate scores
        freq[w] = freq.get(w, 0) + 1
    if not freq:
        return "Unable to summarize (text too short)."
    # Score every sentence by summed word frequency.
    sent_scores = []
    for s in sentences:
        s_words = re.findall(r'\w+', s.lower())
        score = sum(freq.get(w, 0) for w in s_words)
        sent_scores.append((score, s))
    # Keep the highest-scoring sentences...
    sent_scores.sort(reverse=True, key=lambda x: x[0])
    chosen = [s for _, s in sent_scores[:num_sentences]]
    # ...but present them in (approximate) original document order.
    chosen_sorted = sorted(chosen, key=lambda s: text.find(s))
    bullets = "\n".join(f"- {clean_text(s)}" for s in chosen_sorted if s.strip())
    return bullets if bullets else clean_text(" ".join(chosen_sorted))
@@ -133,13 +123,11 @@ def pyttsx3_tts_file(text: str):
133
  return False, "pyttsx3 not installed"
134
  try:
135
  engine = pyttsx3.init()
136
- # create a temp wav file
137
  tf = tempfile.NamedTemporaryFile(delete=False, suffix=".wav")
138
  tf_name = tf.name
139
  tf.close()
140
  engine.save_to_file(text, tf_name)
141
  engine.runAndWait()
142
- # read bytes
143
  with open(tf_name, "rb") as f:
144
  b = f.read()
145
  return True, b
@@ -158,7 +146,6 @@ def call_hf_mermaid(prompt: str, model: str = HF_MERMAID_MODEL):
158
  if not r.ok:
159
  return False, f"HuggingFace returned {r.status_code}: {r.text[:300]}"
160
  j = r.json()
161
- # extract text
162
  if isinstance(j, list) and len(j) > 0 and isinstance(j[0], dict) and "generated_text" in j[0]:
163
  return True, j[0]["generated_text"]
164
  if isinstance(j, str):
@@ -176,10 +163,8 @@ def generate_mermaid_from_summary(summary: str):
176
  "Output only the Mermaid code block. Summary:\n\n" + summary)
177
  ok, hf_out = call_hf_mermaid(prompt)
178
  if ok:
179
- # try to strip triple-backtick wrapper if present
180
  m = re.search(r"```(?:mermaid)?\n([\s\S]+?)```", hf_out, re.IGNORECASE)
181
  return hf_out if m is None else m.group(1).strip()
182
- # fallback local
183
  lines = re.split(r"\n+|-{1,}\s*|β€’\s*", summary)
184
  nodes = [clean_text(l) for l in lines if clean_text(l)]
185
  nodes = nodes[:8]
@@ -188,7 +173,7 @@ def generate_mermaid_from_summary(summary: str):
188
  mermaid = "flowchart TD\n"
189
  for i, n in enumerate(nodes):
190
  node_text = n.replace('"', "'")[:80]
191
- mermaid += ' A{idx}["{text}"]\n'.format(idx=i, text=node_text)
192
  for i in range(len(nodes) - 1):
193
  mermaid += f" A{i} --> A{i+1}\n"
194
  return mermaid
@@ -211,139 +196,158 @@ def render_mermaid(mermaid_code: str, height: int = 420):
211
  st.set_page_config(page_title="PDF Q&A resilient", layout="wide")
212
  st.title("πŸ“„ PDF Q&A β€” resilient (OpenRouter β†’ local fallback)")
213
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
214
  c1, c2, c3 = st.columns(3)
215
  with c1:
216
  st.write("OpenRouter:")
217
- if OPENROUTER_KEY:
218
- st.success("Key present")
219
- else:
220
- st.error("Key missing β€” will use local summarizer/Q&A fallback")
221
  with c2:
222
  st.write("Hugging Face:")
223
- if HUGGINGFACE_KEY:
224
- st.success("Key present (optional)")
225
- else:
226
- st.info("Key missing β€” using local Mermaid fallback")
227
  with c3:
228
  st.write("Audio:")
229
  if ELEVEN_API_KEY:
230
  st.success("ElevenLabs key present (preferred)")
 
 
231
  else:
232
- if HAS_PYTTSX3:
233
- st.info("Using local pyttsx3 fallback TTS")
234
- else:
235
- st.info("No ElevenLabs key and pyttsx3 not available β€” audio will be disabled")
236
 
237
- uploaded_file = st.file_uploader("Upload a PDF", type=["pdf"])
238
- if uploaded_file:
239
- try:
240
- with st.spinner("Extracting text from PDF..."):
241
- raw_text = extract_text_from_pdf(uploaded_file)
242
- except Exception as e:
243
- st.error(f"PDF extraction failed: {e}")
244
- raw_text = ""
 
 
 
 
245
 
246
- if not raw_text:
247
- st.warning("No text extracted (maybe scanned PDF). Use OCR if needed.")
248
- else:
249
- st.success(f"Extracted {len(raw_text)} characters")
250
- # Summarize & diagram
251
- if st.button("Summarize & Diagram (tries OpenRouter β†’ fallback)"):
252
- used = {"summary": None, "summary_source": None, "tts_source": None, "mermaid_source": None}
253
- # Try OpenRouter summary first
254
- with st.spinner("Trying OpenRouter summarization..."):
255
- prompt = f"Summarize the following text in 5-8 concise bullets:\n\n{raw_text[:15000]}"
256
- messages = [{"role": "system", "content": "You are a concise summarizer."},
257
- {"role": "user", "content": prompt}]
258
- ok, out = openrouter_chat(messages, max_tokens=400, model=OPENROUTER_MODEL)
259
  if ok:
260
- used["summary_source"] = "openrouter"
261
- summary = out
262
  else:
263
- # fallback to local
264
- used["summary_source"] = f"fallback_local (reason: {out})"
265
- summary = extractive_summary(raw_text, num_sentences=6)
266
-
267
- st.subheader("πŸ“Œ Summary")
268
- st.write(summary)
269
- st.markdown(f"**Summary source:** {used['summary_source']}")
270
-
271
- # Mermaid
272
- with st.spinner("Generating Mermaid diagram (HF β†’ local fallback)..."):
273
- mermaid = generate_mermaid_from_summary(summary)
274
- # determine mermaid source roughly
275
- used["mermaid_source"] = "huggingface" if HUGGINGFACE_KEY and mermaid.strip().startswith(("flowchart","graph")) else "local"
276
- st.subheader("πŸ—ΊοΈ Summary Diagram")
277
- render_mermaid(mermaid, height=460)
278
- st.code(mermaid, language="mermaid")
279
- st.markdown(f"**Mermaid source:** {used['mermaid_source']}")
280
-
281
- # TTS: try ElevenLabs first, then pyttsx3
282
- if st.checkbox("Enable audio for summary (try ElevenLabs β†’ fallback)"):
283
- with st.spinner("Attempting TTS..."):
284
- if ELEVEN_API_KEY:
285
- ok, out = eleven_tts_bytes(summary)
286
- if ok:
287
- used["tts_source"] = "elevenlabs"
288
- st.audio(out, format="audio/mp3")
289
- else:
290
- # record reason and try pyttsx3
291
- used["tts_source"] = f"elevenlabs_failed ({out})"
292
- if HAS_PYTTSX3:
293
- ok2, out2 = pyttsx3_tts_file(summary)
294
- if ok2:
295
- used["tts_source"] = "pyttsx3"
296
- st.audio(out2, format="audio/wav")
297
- else:
298
- st.error(f"TTS fallback failed: {out2}")
299
- else:
300
- st.error("ElevenLabs TTS failed and pyttsx3 not available.")
301
- else:
302
- if HAS_PYTTSX3:
303
- ok2, out2 = pyttsx3_tts_file(summary)
304
- if ok2:
305
- used["tts_source"] = "pyttsx3"
306
- st.audio(out2, format="audio/wav")
307
- else:
308
- st.error(f"pyttsx3 TTS failed: {out2}")
309
- else:
310
- st.info("No TTS available (no ElevenLabs key and pyttsx3 missing).")
311
 
312
- st.write("### Diagnostics")
313
- st.json(used)
314
 
315
- # Q&A box (tries OpenRouter, otherwise local naive search)
316
- query = st.text_input("Ask a question about the PDF (press Enter):")
317
- if query:
 
 
318
  if OPENROUTER_KEY:
319
- with st.spinner("Asking OpenRouter..."):
320
- prompt = f"Context:\n{raw_text[:15000]}\n\nQuestion: {query}\nAnswer concisely."
321
- messages = [{"role": "system", "content": "You are a helpful assistant."},
322
- {"role": "user", "content": prompt}]
323
- ok, out = openrouter_chat(messages, max_tokens=600, model=OPENROUTER_MODEL)
324
- if ok:
325
- st.subheader("πŸ’‘ Answer (OpenRouter)")
326
- st.write(out)
 
 
 
 
 
 
 
327
  else:
328
- st.warning(f"OpenRouter failed: {out}\nFalling back to local Q&A.")
329
- # fallback to very naive local answer: search for query words in text and return matching sentences
330
- q = query.lower()
331
- sentences = re.split(r'(?<=[.!?])\s+', raw_text)
332
- matches = [s for s in sentences if all(w in s.lower() for w in re.findall(r'\w+', q)[:3])]
333
- if matches:
334
- st.subheader("πŸ’‘ Answer (local fallback)")
335
- st.write(matches[:3])
336
- else:
337
- st.info("No good local match found.")
338
  else:
339
- st.info("OpenRouter key missing β€” using local Q&A fallback.")
340
- q = query.lower()
341
- sentences = re.split(r'(?<=[.!?])\s+', raw_text)
342
- matches = [s for s in sentences if all(w in s.lower() for w in re.findall(r'\w+', q)[:3])]
343
  if matches:
344
- st.subheader("πŸ’‘ Answer (local fallback)")
345
  st.write(matches[:3])
346
  else:
347
- st.info("No good local match found.")
348
- else:
349
- st.info("Upload a PDF to begin.")
 
16
  HAS_PYTTSX3 = False
17
 
18
# ============ CONFIG ============
# Prefer environment variables; fall back to st.secrets on Streamlit Cloud.
def _get_key(name: str):
    """Return *name* from the environment, else from ``st.secrets``, else None.

    ``st.secrets`` is only consulted when the env var is unset, because the
    original ``os.getenv(name, st.secrets.get(name))`` evaluated the default
    eagerly — and accessing ``st.secrets`` raises when no secrets.toml exists
    (typical for local runs).
    """
    value = os.getenv(name)
    if value:
        return value
    try:
        return st.secrets.get(name)
    except Exception:
        # No secrets file configured — treat as "key absent".
        return None

OPENROUTER_KEY = _get_key("OPENROUTER_API_KEY")
OPENROUTER_MODEL = os.getenv("OPENROUTER_MODEL", "gpt-4o-mini")
ELEVEN_API_KEY = _get_key("ELEVEN_API_KEY")
HUGGINGFACE_KEY = _get_key("HUGGINGFACE_API_KEY")
HF_MERMAID_MODEL = os.getenv("HF_MERMAID_MODEL", "TroyDoesAI/MermaidStable3B")
25
 
26
  # ============ HELPERS ============
27
+ # (rest of the helper functions from your original code are here, unchanged)
28
  def clean_text(text: str) -> str:
29
  return re.sub(r"\s+", " ", text or "").strip()
30
 
 
60
  resp = requests.post(url, json=payload, headers=headers, timeout=30)
61
  resp.raise_for_status()
62
  data = resp.json()
 
63
  choices = data.get("choices", [])
64
  if choices:
65
  c = choices[0]
 
66
  if "message" in c and isinstance(c["message"], dict):
67
  content = c["message"].get("content")
68
  if isinstance(content, dict) and "content" in content:
 
71
  return True, content
72
  if "text" in c:
73
  return True, c["text"]
 
74
  if "text" in data:
75
  return True, data["text"]
76
  return False, "OpenRouter responded with unexpected shape"
 
79
 
80
  # ============ Local extractive summarizer (offline) ============
81
  def extractive_summary(text: str, num_sentences: int = 6) -> str:
 
82
  if not text:
83
  return ""
 
84
  sentences = re.split(r'(?<=[.!?])\s+', text)
 
85
  words = re.findall(r'\w+', text.lower())
86
+ stopwords = set(["the","and","is","in","to","of","a","that","it","for","on","with","as","are","was","be","by","an","or"])
 
 
 
87
  freq = {}
88
  for w in words:
89
  if w in stopwords or len(w) < 2:
 
91
  freq[w] = freq.get(w, 0) + 1
92
  if not freq:
93
  return "Unable to summarize (text too short)."
 
94
  sent_scores = []
95
  for s in sentences:
96
  s_words = re.findall(r'\w+', s.lower())
97
  score = sum(freq.get(w, 0) for w in s_words)
98
  sent_scores.append((score, s))
 
99
  sent_scores.sort(reverse=True, key=lambda x: x[0])
100
  chosen = [s for _, s in sent_scores[:num_sentences]]
 
101
  chosen_sorted = sorted(chosen, key=lambda s: text.find(s))
102
  bullets = "\n".join(f"- {clean_text(s)}" for s in chosen_sorted if s.strip())
103
  return bullets if bullets else clean_text(" ".join(chosen_sorted))
 
123
  return False, "pyttsx3 not installed"
124
  try:
125
  engine = pyttsx3.init()
 
126
  tf = tempfile.NamedTemporaryFile(delete=False, suffix=".wav")
127
  tf_name = tf.name
128
  tf.close()
129
  engine.save_to_file(text, tf_name)
130
  engine.runAndWait()
 
131
  with open(tf_name, "rb") as f:
132
  b = f.read()
133
  return True, b
 
146
  if not r.ok:
147
  return False, f"HuggingFace returned {r.status_code}: {r.text[:300]}"
148
  j = r.json()
 
149
  if isinstance(j, list) and len(j) > 0 and isinstance(j[0], dict) and "generated_text" in j[0]:
150
  return True, j[0]["generated_text"]
151
  if isinstance(j, str):
 
163
  "Output only the Mermaid code block. Summary:\n\n" + summary)
164
  ok, hf_out = call_hf_mermaid(prompt)
165
  if ok:
 
166
  m = re.search(r"```(?:mermaid)?\n([\s\S]+?)```", hf_out, re.IGNORECASE)
167
  return hf_out if m is None else m.group(1).strip()
 
168
  lines = re.split(r"\n+|-{1,}\s*|β€’\s*", summary)
169
  nodes = [clean_text(l) for l in lines if clean_text(l)]
170
  nodes = nodes[:8]
 
173
  mermaid = "flowchart TD\n"
174
  for i, n in enumerate(nodes):
175
  node_text = n.replace('"', "'")[:80]
176
+ mermaid += f' A{i}["{node_text}"]\n'
177
  for i in range(len(nodes) - 1):
178
  mermaid += f" A{i} --> A{i+1}\n"
179
  return mermaid
 
196
st.set_page_config(page_title="PDF Q&A resilient", layout="wide")
st.title("📄 PDF Q&A — resilient (OpenRouter → local fallback)")

# Session-state defaults: set once so values survive Streamlit reruns.
if 'text_data' not in st.session_state:
    st.session_state.text_data = None
if 'summary' not in st.session_state:
    st.session_state.summary = None
if 'mermaid' not in st.session_state:
    st.session_state.mermaid = None
if 'diagnostics' not in st.session_state:
    st.session_state.diagnostics = {"summary_source": None, "mermaid_source": None, "tts_source": None}
208
+
209
def process_pdf():
    """``st.file_uploader`` on_change callback.

    Extracts text from the uploaded PDF into ``st.session_state.text_data``;
    stores ``None`` when nothing is uploaded or extraction fails.
    """
    uploaded_file = st.session_state.uploaded_file
    if not uploaded_file:
        # Uploader was cleared — drop any stale text.
        st.session_state.text_data = None
        return
    try:
        with st.spinner("Extracting text from PDF..."):
            raw_text = extract_text_from_pdf(uploaded_file)
        st.session_state.text_data = raw_text
        st.success(f"Extracted {len(raw_text)} characters")
    except Exception as e:
        st.error(f"PDF extraction failed: {e}")
        st.session_state.text_data = None
222
+
223
def generate_outputs():
    """Button callback: build summary and Mermaid diagram into session state.

    Summary tries OpenRouter first, then the offline extractive summarizer;
    the diagram tries the Hugging Face model, then a local fallback.  All
    results and their provenance land in ``st.session_state``.
    """
    raw_text = st.session_state.text_data
    if not raw_text:
        st.error("No text available to process. Please upload a PDF.")
        return

    used = st.session_state.diagnostics

    # --- Summary: OpenRouter → local extractive fallback ---
    with st.spinner("Trying OpenRouter summarization..."):
        prompt = f"Summarize the following text in 5-8 concise bullets:\n\n{raw_text[:15000]}"
        messages = [{"role": "system", "content": "You are a concise summarizer."},
                    {"role": "user", "content": prompt}]
        ok, out = openrouter_chat(messages, max_tokens=400, model=OPENROUTER_MODEL)
        if ok:
            used["summary_source"] = "openrouter"
            summary = out
        else:
            # Record the failure reason so the diagnostics panel shows why.
            used["summary_source"] = f"fallback_local (reason: {out})"
            summary = extractive_summary(raw_text, num_sentences=6)
        st.session_state.summary = summary
        st.session_state.diagnostics = used

    # --- Diagram: Hugging Face model → local fallback ---
    with st.spinner("Generating Mermaid diagram (HF → local fallback)..."):
        mermaid = generate_mermaid_from_summary(summary)
        # Rough provenance guess: HF output normally starts with a diagram header.
        used["mermaid_source"] = "huggingface" if HUGGINGFACE_KEY and mermaid.strip().startswith(("flowchart", "graph")) else "local"
        st.session_state.mermaid = mermaid
        st.session_state.diagnostics = used

    st.success("Summary and Diagram generated!")
254
+
255
# ============ UI layout ============
c1, c2, c3 = st.columns(3)
with c1:
    st.write("OpenRouter:")
    # Plain if/else: conditional expressions should not be used for side effects.
    if OPENROUTER_KEY:
        st.success("Key present")
    else:
        st.error("Key missing — will use local summarizer/Q&A fallback")
with c2:
    st.write("Hugging Face:")
    if HUGGINGFACE_KEY:
        st.success("Key present (optional)")
    else:
        st.info("Key missing — using local Mermaid fallback")
with c3:
    st.write("Audio:")
    if ELEVEN_API_KEY:
        st.success("ElevenLabs key present (preferred)")
    elif HAS_PYTTSX3:
        st.info("Using local pyttsx3 fallback TTS")
    else:
        st.info("No ElevenLabs key and pyttsx3 not available")

# Uploader writes into session state via the process_pdf callback.
st.file_uploader("Upload a PDF", type=["pdf"], key='uploaded_file', on_change=process_pdf)

if st.session_state.text_data:
    st.button("Summarize & Diagram", on_click=generate_outputs)

if st.session_state.summary:
    st.subheader("📌 Summary")
    st.write(st.session_state.summary)
    st.markdown(f"**Summary source:** {st.session_state.diagnostics['summary_source']}")

    st.subheader("🗺️ Summary Diagram")
    render_mermaid(st.session_state.mermaid, height=460)
    st.code(st.session_state.mermaid, language="mermaid")
    st.markdown(f"**Mermaid source:** {st.session_state.diagnostics['mermaid_source']}")

    st.write("### TTS Audio")
    if st.checkbox("Generate audio for summary"):
        with st.spinner("Attempting TTS..."):
            audio_bytes = None
            if ELEVEN_API_KEY:
                ok, out = eleven_tts_bytes(st.session_state.summary)
                if ok:
                    st.session_state.diagnostics["tts_source"] = "elevenlabs"
                    audio_bytes = out
                else:
                    # Keep the failure reason, then try the local engine.
                    st.session_state.diagnostics["tts_source"] = f"elevenlabs_failed ({out})"
                    if HAS_PYTTSX3:
                        ok2, out2 = pyttsx3_tts_file(st.session_state.summary)
                        if ok2:
                            st.session_state.diagnostics["tts_source"] = "pyttsx3"
                            audio_bytes = out2
            elif HAS_PYTTSX3:
                ok2, out2 = pyttsx3_tts_file(st.session_state.summary)
                if ok2:
                    st.session_state.diagnostics["tts_source"] = "pyttsx3"
                    audio_bytes = out2

        if audio_bytes:
            # ElevenLabs returns mp3; pyttsx3 writes wav.
            if st.session_state.diagnostics["tts_source"] == "elevenlabs":
                st.audio(audio_bytes, format="audio/mp3")
            else:
                st.audio(audio_bytes, format="audio/wav")
        else:
            st.error("Audio generation failed. Check your API keys and local setup.")
        st.markdown(f"**TTS source:** {st.session_state.diagnostics['tts_source']}")

    st.write("### Diagnostics")
    st.json(st.session_state.diagnostics)

    st.markdown("---")
    st.subheader("❓ Q&A")
    query = st.text_input("Ask a question about the PDF:")
    if query:
        with st.spinner("Processing your question..."):

            def _local_answer():
                """Naive local Q&A: show sentences containing the first query words."""
                sentences = re.split(r'(?<=[.!?])\s+', st.session_state.text_data)
                q_words = re.findall(r'\w+', query.lower())[:3]
                matches = [s for s in sentences if all(w in s.lower() for w in q_words)]
                if matches:
                    st.info("Answer from local fallback:")
                    st.write(matches[:3])
                else:
                    st.info("No good local match found.")

            if OPENROUTER_KEY:
                prompt = f"Context:\n{st.session_state.text_data[:15000]}\n\nQuestion: {query}\nAnswer concisely."
                messages = [{"role": "system", "content": "You are a helpful assistant."},
                            {"role": "user", "content": prompt}]
                ok, out = openrouter_chat(messages, max_tokens=600, model=OPENROUTER_MODEL)
                if ok:
                    st.info("Answer from OpenRouter:")
                    st.write(out)
                else:
                    st.warning(f"OpenRouter failed: {out}\nFalling back to local Q&A.")
                    _local_answer()
            else:
                st.info("OpenRouter key missing. Using local Q&A fallback.")
                _local_answer()