|
|
import os
import tempfile
import uuid

import streamlit as st

from src.core.embeddings import load_embeddings
from src.core.llm import load_llm
from src.graph.rag_graph import ProjectRAGGraph
from src.ingestion.document_processor import DocumentProcessor
|
|
|
|
|
|
|
|
# Page setup: set_page_config must be the first Streamlit call in the script.
# Fix: the emoji in both strings was mojibake ("π ...") from a botched
# encoding round-trip; restored to a readable UTF-8 emoji.
st.set_page_config(page_title="📁 Project RAG Chat", layout="wide")

st.title("📁 Industrial Project Report Chatbot")
|
|
|
|
|
|
|
|
# Seed per-session state exactly once; Streamlit re-runs this script on every
# interaction, so each key is only created when absent.  Factories (not
# values) are stored so the UUID is generated lazily, just like the original
# explicit `if`-guards.
for state_key, make_default in (
    ("thread_id", lambda: str(uuid.uuid4())),  # one conversation thread per session
    ("chat_history", list),                    # list of (role, message) tuples
    ("rag_graph", lambda: None),               # set after documents are processed
):
    if state_key not in st.session_state:
        st.session_state[state_key] = make_default()
|
|
|
|
|
|
|
|
# Sidebar: PDF upload controls.
# Fix: the header emoji was mojibake ("π ..."); restored to readable UTF-8.
st.sidebar.header("📁 Upload PDFs")

uploaded_files = st.sidebar.file_uploader(
    "Upload one or more project PDFs",
    type="pdf",
    accept_multiple_files=True,
)
|
|
|
|
|
# Build the RAG pipeline from the uploaded PDFs when the user clicks the
# button.  Uploads are spilled to temp files because DocumentProcessor
# expects filesystem paths.
# Fixes: (1) button-label emoji was mojibake; (2) the NamedTemporaryFile
# paths were created with delete=False but never removed, leaking one temp
# file per uploaded PDF on every processing run — now cleaned up in finally.
if uploaded_files and st.sidebar.button("📄 Process Documents"):
    with st.spinner("Processing documents..."):
        temp_paths = []
        try:
            for file in uploaded_files:
                with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
                    tmp.write(file.read())
                    temp_paths.append(tmp.name)

            embeddings = load_embeddings()
            llm = load_llm()

            processor = DocumentProcessor(embeddings)
            vector_store = processor.process_pdfs(temp_paths)

            st.session_state.rag_graph = ProjectRAGGraph(llm, vector_store)
        finally:
            # Remove the spilled copies whether or not processing succeeded.
            for path in temp_paths:
                try:
                    os.unlink(path)
                except OSError:
                    pass  # best-effort cleanup; nothing useful to do on failure

        st.success("Documents processed successfully!")
|
|
|
|
|
|
|
|
# Main chat area.
# Fix: the subheader emoji was mojibake ("π¬ ..."); restored to UTF-8.
st.subheader("💬 Ask Questions")

# Returns the submitted question, or None when nothing was entered this run.
user_question = st.chat_input("Ask something about the uploaded reports...")
|
|
|
|
|
# Answer the question via the RAG graph, recording both sides of the
# exchange in chat_history so the render loop below can replay them.
if user_question and st.session_state.rag_graph:
    st.session_state.chat_history.append(("user", user_question))

    with st.spinner("Thinking..."):
        # thread_id scopes the graph's conversational memory to this session.
        answer = st.session_state.rag_graph.query(
            user_question,
            st.session_state.thread_id,
        )

    st.session_state.chat_history.append(("assistant", answer))
elif user_question:
    # Fix: previously a question typed before any documents were processed
    # was silently dropped; tell the user why nothing happened.
    st.warning("Please upload and process documents before asking questions.")
|
|
|
|
|
|
|
|
# Replay the whole conversation on every rerun — Streamlit redraws the page
# from scratch, so chat_history is the single source of truth for the UI.
for speaker, text in st.session_state.chat_history:
    with st.chat_message(speaker):
        st.markdown(text)
|
|
|
|
|
# Onboarding hint shown until documents have been processed.
# Fix: the mojibake emoji had split the string literal across two lines
# (a syntax error as written); restored as a single well-formed string.
if not st.session_state.rag_graph:
    st.info("⬅️ Upload PDFs and click *Process Documents* to start chatting.")
|
|
|