File size: 2,354 Bytes
34e1f48
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
import os
import tempfile
import uuid

import streamlit as st

from src.core.embeddings import load_embeddings
from src.core.llm import load_llm
from src.graph.rag_graph import ProjectRAGGraph
from src.ingestion.document_processor import DocumentProcessor

# ---------------- Page Config ----------------
st.set_page_config(page_title="πŸ“„ Project RAG Chat", layout="wide")
st.title("πŸ“‚ Industrial Project Report Chatbot")

# ---------------- Session State ----------------
# Seed every per-session key exactly once. Factories (not values) are stored
# so that e.g. uuid.uuid4() is only evaluated when the key is actually missing.
for _key, _make_default in (
    ("thread_id", lambda: str(uuid.uuid4())),  # stable id for this chat session
    ("chat_history", list),                    # list of (role, message) tuples
    ("rag_graph", lambda: None),               # set after documents are processed
):
    if _key not in st.session_state:
        st.session_state[_key] = _make_default()

# ---------------- Sidebar ----------------
st.sidebar.header("πŸ“„ Upload PDFs")
uploaded_files = st.sidebar.file_uploader(
    "Upload one or more project PDFs",
    type="pdf",
    accept_multiple_files=True
)

if uploaded_files and st.sidebar.button("πŸ” Process Documents"):
    with st.spinner("Processing documents..."):
        temp_paths = []
        try:
            # The processor works on file paths, so persist each in-memory
            # upload to a temporary PDF on disk first.
            for file in uploaded_files:
                with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
                    tmp.write(file.read())
                    temp_paths.append(tmp.name)

            embeddings = load_embeddings()
            llm = load_llm()

            processor = DocumentProcessor(embeddings)
            vector_store = processor.process_pdfs(temp_paths)

            st.session_state.rag_graph = ProjectRAGGraph(llm, vector_store)
            st.success("Documents processed successfully!")
        finally:
            # Bug fix: the delete=False temp files were never removed, leaking
            # one file per uploaded PDF on every processing run. Clean them up
            # even when processing raises.
            for path in temp_paths:
                try:
                    os.unlink(path)
                except OSError:
                    pass  # best-effort cleanup; the file may already be gone

# ---------------- Chat UI ----------------
st.subheader("πŸ’¬ Ask Questions")

user_question = st.chat_input("Ask something about the uploaded reports...")

if user_question and st.session_state.rag_graph:
    # Record the question first so it renders in this run's history below.
    st.session_state.chat_history.append(("user", user_question))

    with st.spinner("Thinking..."):
        answer = st.session_state.rag_graph.query(
            user_question,
            st.session_state.thread_id
        )

    st.session_state.chat_history.append(("assistant", answer))
elif user_question:
    # Bug fix: a question typed before documents were processed used to be
    # silently discarded; tell the user what to do instead.
    st.warning("Please upload and process documents before asking questions.")

# ---------------- Display Chat ----------------
for role, msg in st.session_state.chat_history:
    with st.chat_message(role):
        st.markdown(msg)

if not st.session_state.rag_graph:
    st.info("⬅️ Upload PDFs and click *Process Documents* to start chatting.")