# ==========================================================
# streamlit_app.py — Stable Layout + Latest Backend Improvements
# ==========================================================
import os
import re

import streamlit as st
import torch

# ==========================================================
# ✅ PAGE CONFIG (must be the first Streamlit call in the script)
# ==========================================================
st.set_page_config(page_title="Enterprise Knowledge Assistant", layout="wide")
print("CUDA available:", torch.cuda.is_available())

# ==========================================================
# ⚙️ CACHE SETUP — point every HuggingFace cache at a writable tmp dir
# (set BEFORE the project imports below so HF libraries pick them up)
# ==========================================================
CACHE_DIR = "/tmp/hf_cache"
os.makedirs(CACHE_DIR, exist_ok=True)
os.environ.update({
    "HF_HOME": CACHE_DIR,
    "TRANSFORMERS_CACHE": CACHE_DIR,
    "HF_DATASETS_CACHE": CACHE_DIR,
    "HF_MODULES_CACHE": CACHE_DIR,
})

# ==========================================================
# 📦 PROJECT IMPORTS
# ==========================================================
from ingestion import extract_text_from_pdf, chunk_text
from vectorstore import build_faiss_index
from qa import retrieve_chunks, generate_answer, cache_embeddings, embed_chunks, genai_generate

# st.experimental_rerun() was deprecated and later removed from Streamlit;
# prefer st.rerun() when it exists, otherwise fall back to the old API.
_rerun = getattr(st, "rerun", None) or st.experimental_rerun


# ==========================================================
# 🧠 SMART SUGGESTION GENERATOR
# ==========================================================
def generate_dynamic_suggestions_from_toc(toc, chunks, doc_name="Document"):
    """Ask the LLM for short, document-specific suggested questions.

    Parameters
    ----------
    toc : list of (section, title) pairs from the PDF outline.
    chunks : list[str] — the document's text chunks (only the first few
        are sampled for prompt context).
    doc_name : str — display name inserted into the prompt.

    Returns
    -------
    list[str] — at most 7 deduplicated questions ending in "?". Returns []
    when toc/chunks are empty, and two generic fallback questions if the
    LLM call fails (best-effort: never break the UI on a model error).
    """
    if not toc or not chunks:
        return []

    titles = []
    for _sec, raw_title in toc:
        # Drop leading section numbering ("3.1.2 ") and trailing
        # dot-leaders with page numbers ("..... 42").
        title = re.sub(r"^\s*[\dA-Za-z.\-]+\s*", "", raw_title)
        title = re.sub(r"\.{2,}\s*\d+$", "", title).strip()
        if 4 < len(title) < 120:
            titles.append(title)

    context_sample = " ".join(chunks[:3])[:4000]
    prompt = f"""
You are generating short, natural, and context-aware questions for users reading "{doc_name}".
Use the Table of Contents and some document text for inspiration.

TABLE OF CONTENTS:
{chr(10).join('- ' + t for t in titles[:8])}

SAMPLE TEXT:
{context_sample}

Generate 5–7 clear and human-like questions based strictly on this document.
Each should sound natural, under 18 words, and avoid robotic phrasing.
"""
    try:
        ai_response = genai_generate(prompt)
        # Capture anything ending in "?", optionally prefixed with a bullet.
        # (Fixed: the bullet/dash characters here were mojibake-corrupted,
        # so genuine "•"/"—" bullets in model output were never stripped.)
        questions = re.findall(r"[-•]?\s*(.+?)\?", ai_response)
        clean_qs = [q.strip("•-— ").strip() + "?" for q in questions if 8 < len(q) < 120]
        seen, final = set(), []
        for q in clean_qs:  # case-insensitive de-dup, original order kept
            if q.lower() not in seen:
                seen.add(q.lower())
                final.append(q)
        return final[:7]
    except Exception:
        return ["How do I start using this guide?", "What does this document cover?"]


# ==========================================================
# 🎨 STYLING — MINIMAL ENTERPRISE DESIGN
# NOTE(review): the CSS payload appears to have been lost in transit —
# this markdown body is empty. Restore the original <style> block if
# it is available elsewhere.
# ==========================================================
st.markdown("""
""", unsafe_allow_html=True)

# ==========================================================
# 🧭 SIDEBAR
# ==========================================================
with st.sidebar:
    st.markdown("### 🧭 Response Style")
    mode = st.radio(
        "",
        ("Strict (Document-only)", "Extended (Document + general)"),
        index=0,
        help="Strict = answers only from the uploaded document. Extended = may include related general info.",
    )
    st.markdown("---")
    show_dev = st.checkbox("Show advanced settings (for developers)", value=False)
    if show_dev:
        st.markdown("### ⚙️ Developer Options")
        chunk_size = st.slider("Chunk Size", 200, 1500, 1000, step=50)
        overlap = st.slider("Chunk Overlap", 50, 200, 120, step=10)
        top_k = st.slider("Top K Results", 1, 10, 5)
    else:
        # Sensible defaults when the developer panel is hidden.
        chunk_size, overlap, top_k = 1000, 120, 5
    st.markdown("---")
    st.caption("✨ Built by Shubham Sharma")

# ==========================================================
# 🧠 SESSION STATE — initialise keys once per browser session
# ==========================================================
for key, val in {
    "user_query_input": "",
    "show_more": False,
    "selected_suggestion": None,
    "query_suggestions_fixed": None,
    "last_doc": None,
}.items():
    if key not in st.session_state:
        st.session_state[key] = val


def set_user_query(q, idx):
    """Copy a clicked suggestion into the query box and rerun the script.

    Safe because the suggestion buttons render before the text_input that
    owns the "user_query_input" widget key.
    """
    st.session_state["user_query_input"] = q
    st.session_state["selected_suggestion"] = idx
    _rerun()


# ==========================================================
# 📄 MAIN SECTION
# ==========================================================
st.title("📄 Enterprise Knowledge Assistant")
st.caption("Query SAP documentation and enterprise PDFs — powered by reasoning and retrieval.")

doc_choice = st.radio(
    "Select a document:",
    ["-- Select --", "Sample PDF", "Upload Custom PDF"],
    index=0,
)

# ==========================================================
# 📂 DOCUMENT HANDLING
# ==========================================================
if doc_choice == "-- Select --":
    st.info("⬅️ Select or upload a document to begin.")
else:
    if doc_choice == "Sample PDF":
        temp_path = os.path.join(os.path.dirname(__file__), "sample.pdf")
        st.success("📘 Sample document loaded successfully — you can start asking your questions below.")
    else:
        uploaded_file = st.file_uploader("", type="pdf", label_visibility="collapsed")
        if uploaded_file:
            temp_path = os.path.join("/tmp", uploaded_file.name)
            with open(temp_path, "wb") as f:
                f.write(uploaded_file.getbuffer())
            st.success("✅ Document processed successfully — you can start asking your questions below.")
        else:
            temp_path = None

    if temp_path:
        with st.spinner("🔍 Processing document..."):
            text, toc = extract_text_from_pdf(temp_path)
            chunks = chunk_text(text, chunk_size=chunk_size, overlap=overlap)
        with st.spinner("⚙️ Building search index..."):
            # Embeddings are cached per document basename to avoid recompute.
            embeddings = cache_embeddings(os.path.basename(temp_path), chunks, embed_chunks)
            index = build_faiss_index(embeddings)

        # Regenerate suggestions only when the document actually changes.
        doc_name = os.path.basename(temp_path)
        if st.session_state["last_doc"] != doc_name:
            query_suggestions = generate_dynamic_suggestions_from_toc(toc, chunks, doc_name)
            st.session_state["query_suggestions_fixed"] = query_suggestions
            st.session_state["last_doc"] = doc_name
        else:
            query_suggestions = st.session_state["query_suggestions_fixed"]

        # ----------------------------------------------------------
        # 💬 ASK SECTION
        # ----------------------------------------------------------
        st.markdown("### 💬 Ask the Assistant")
        if query_suggestions:
            visible = query_suggestions if st.session_state["show_more"] else query_suggestions[:3]
            cols = st.columns(min(3, len(visible)))
            for i, q in enumerate(visible):
                # i % 3 never exceeds len(cols)-1: cols has min(3, len(visible)) entries.
                if cols[i % 3].button(f"💬 {q}", key=f"sugg_{i}"):
                    set_user_query(q, i)
            toggle_text = "Show less ▲" if st.session_state["show_more"] else "Show more ▼"
            if st.button(toggle_text, help="Show or hide more suggestions"):
                st.session_state["show_more"] = not st.session_state["show_more"]
                _rerun()

        user_query = st.text_input("Type your question or click one above:", key="user_query_input")

        # ----------------------------------------------------------
        # 💡 RESPONSE SECTION
        # ----------------------------------------------------------
        if user_query.strip():
            reasoning_mode = mode == "Extended (Document + general)"
            with st.spinner("💭 Generating your answer..."):
                retrieved = retrieve_chunks(user_query, index, chunks, top_k=top_k, embeddings=embeddings)
                answer = generate_answer(user_query, retrieved, reasoning_mode=reasoning_mode)

            st.markdown("### 🤖 Assistant’s Answer")
            # NOTE(review): the original HTML wrapper around the answer was
            # lost (only the f-string interpolation survived); rendering the
            # answer text directly.
            st.markdown(f"""
{answer}
""", unsafe_allow_html=True)

            with st.expander("📘 Supporting Context"):
                for i, r in enumerate(retrieved, start=1):
                    st.markdown(f"**Chunk {i}:** {r}")

            if toc:
                with st.expander("📚 Explore Document Sections"):
                    toc_text = "\n".join(f"{sec}. {title}" for sec, title in toc)
                    st.text_area("", toc_text, height=140)

            with st.expander("📄 Document Preview"):
                st.text_area("", text[:1000], height=140)
            st.caption(f"{len(chunks)} chunks processed.")