NavyDevilDoc committed on
Commit
6043f3a
·
verified ·
1 Parent(s): e5ea137

Update src/app.py

Browse files

Added a framework to move the flattened context into the knowledge base

Files changed (1) hide show
  1. src/app.py +39 -7
src/app.py CHANGED
@@ -338,12 +338,16 @@ with tab2:
338
 
339
  # 3. FLATTEN
340
  with col_c:
 
 
 
 
341
  if st.button("📄 Flatten Context"):
342
  with st.spinner("Flattening..."):
343
  key = st.session_state.get("user_openai_key") or OPENAI_KEY
344
- # Extract
 
345
  with open(temp_path, "rb") as f:
346
- # Dummy object again for the loader
347
class Wrapper:
    """Minimal stand-in for a Streamlit UploadedFile.

    Exposes the two attributes the loader needs: ``.name`` (filename)
    and ``.read()`` (returns the raw bytes handed to the constructor).
    """

    def __init__(self, data, n):
        # Keep the payload and filename exactly as given.
        self.data = data
        self.name = n

    def read(self):
        # Mirror UploadedFile.read(): hand back the stored bytes.
        return self.data
@@ -351,22 +355,50 @@ with tab2:
351
  Wrapper(f.read(), uploaded_file.name), use_vision=use_vision, api_key=key
352
  )
353
 
354
- # Parse
355
  proc = OutlineProcessor(raw)
356
  items = proc.parse()
357
 
358
- # Flatten
359
  out_txt = []
360
  bar = st.progress(0)
361
  for i, item in enumerate(items):
362
- # Use the Universal Router so it works with Granite too!
363
  p = f"Context: {item['context']}\nTarget: {item['target']}\nRewrite as one sentence."
364
  m = [{"role":"user", "content": p}]
365
  res, _ = query_model_universal(m, 300, model_choice, st.session_state.get("user_openai_key"))
366
  out_txt.append(res)
367
  bar.progress((i+1)/len(items))
368
-
369
- st.text_area("Result", "\n".join(out_txt), height=300)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
370
 
371
  st.divider()
372
 
 
338
 
339
  # 3. FLATTEN
340
  with col_c:
341
+ # We use a session state variable to store the result so it persists for the "Index" step
342
+ if "flattened_result" not in st.session_state:
343
+ st.session_state.flattened_result = None
344
+
345
  if st.button("📄 Flatten Context"):
346
  with st.spinner("Flattening..."):
347
  key = st.session_state.get("user_openai_key") or OPENAI_KEY
348
+
349
+ # A. Extract
350
  with open(temp_path, "rb") as f:
 
351
class Wrapper:
    """Lightweight dummy that mimics a Streamlit UploadedFile.

    Provides just enough surface for the loader: a ``name`` attribute
    and a ``read()`` method returning the bytes supplied at creation.
    """

    def __init__(self, data, n):
        # Store payload and filename verbatim; no copying or validation.
        self.data = data
        self.name = n

    def read(self):
        # Same contract as UploadedFile.read(): return the stored payload.
        return self.data
 
355
  Wrapper(f.read(), uploaded_file.name), use_vision=use_vision, api_key=key
356
  )
357
 
358
+ # B. Parse
359
  proc = OutlineProcessor(raw)
360
  items = proc.parse()
361
 
362
+ # C. Flatten
363
  out_txt = []
364
  bar = st.progress(0)
365
  for i, item in enumerate(items):
 
366
  p = f"Context: {item['context']}\nTarget: {item['target']}\nRewrite as one sentence."
367
  m = [{"role":"user", "content": p}]
368
  res, _ = query_model_universal(m, 300, model_choice, st.session_state.get("user_openai_key"))
369
  out_txt.append(res)
370
  bar.progress((i+1)/len(items))
371
+
372
+ # D. Store Result in Session State
373
+ final_flattened_text = "\n".join(out_txt)
374
+ st.session_state.flattened_result = {
375
+ "text": final_flattened_text,
376
+ "source": f"{uploaded_file.name}_flat"
377
+ }
378
+ st.rerun() # Refresh to show the new result/buttons
379
+
380
+ # Display Result & Index Option
381
+ if st.session_state.flattened_result:
382
+ res = st.session_state.flattened_result
383
+ st.success("Flattening Complete!")
384
+ st.text_area("Result", res["text"], height=200)
385
+
386
+ # The New Button
387
+ if st.button("📥 Index This Flattened Version"):
388
+ with st.spinner("Indexing Flattened Text..."):
389
+ ok, msg = rag_engine.process_and_add_text(
390
+ res["text"],
391
+ res["source"],
392
+ st.session_state.username
393
+ )
394
+ if ok:
395
+ tracker.upload_user_db(st.session_state.username) # Sync!
396
+ st.success(msg)
397
+ # Optional: Clear result after adding
398
+ # st.session_state.flattened_result = None
399
+ # st.rerun()
400
+ else:
401
+ st.error(msg)
402
 
403
  st.divider()
404