|
|
import streamlit as st |
|
|
import os |
|
|
import tempfile |
|
|
|
|
|
|
|
|
from src.rag_graph import ProjectRAGGraph |
|
|
|
|
|
|
|
|
|
|
|
# Streamlit page setup. set_page_config must be the first Streamlit
# command executed on the page, so it stays at the top of the script.
st.set_page_config(page_title="Project Analyst RAG", layout="wide")

# NOTE(review): the leading "π" looks like a mojibake'd emoji (mis-encoded
# during copy/paste rather than an intentional pi) — confirm the intended
# character with the author before changing it.
st.title("π Professional Project Analyst Chat")
|
|
|
|
|
|
|
|
# One-time session initialization. Streamlit reruns the whole script on
# every interaction, so each key is only created when it is absent.
if "rag_graph" not in st.session_state:
    # Built lazily so the (potentially expensive) graph is constructed once.
    st.session_state.rag_graph = ProjectRAGGraph()

# Simple defaults can go through setdefault: it assigns only on first run.
st.session_state.setdefault("messages", [])
st.session_state.setdefault("thread_id", "default_user_1")
|
|
|
|
|
|
|
|
with st.sidebar:
    st.header("Upload Documents")
    uploaded_files = st.file_uploader(
        "Upload Project PDFs",
        type="pdf",
        accept_multiple_files=True
    )

    process_button = st.button("Process Documents")

    if process_button and uploaded_files:
        with st.spinner("Processing PDFs..."):
            pdf_paths = []
            original_names = []
            # try/finally guarantees the temp files are deleted even when
            # process_documents raises; the original only cleaned up on the
            # success path, leaking one temp PDF per upload on any error.
            try:
                for uploaded_file in uploaded_files:
                    original_names.append(uploaded_file.name)
                    # The PDF loader needs a real filesystem path, so spill
                    # each in-memory upload to a named temp file.
                    with tempfile.NamedTemporaryFile(delete=False, suffix=".pdf") as tmp:
                        tmp.write(uploaded_file.getvalue())
                        pdf_paths.append(tmp.name)

                # Index the uploaded PDFs; original_names preserves the
                # user-facing filenames for citation display.
                st.session_state.rag_graph.process_documents(
                    pdf_paths,
                    original_names=original_names
                )
            finally:
                # Only paths of successfully created temp files are in the
                # list, so unconditional removal is safe here.
                for path in pdf_paths:
                    os.remove(path)
            st.success("Documents Indexed Successfully!")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Replay the stored conversation on each rerun. Citation rendering here is
# aligned with the live assistant view (basename of the source file and a
# 1-based page number); the original showed the raw temp-file path and the
# raw 0-based page index, so the same citation looked different after a
# rerun than it did when first answered.
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])
        # Only assistant turns carry citations, and only when retrieval
        # returned documents.
        if message.get("citations"):
            with st.expander("View Sources"):
                for doc in message["citations"]:
                    source_name = os.path.basename(doc.metadata.get('source', 'Unknown'))
                    # Loader pages are 0-based; display 1-based for humans.
                    page_num = doc.metadata.get('page', 0) + 1
                    st.caption(f"Source: {source_name} - Page: {page_num}")
                    st.write(f"_{doc.page_content[:200]}..._")
|
|
|
|
|
|
|
|
# Chat entry point: take a user question, run it through the RAG workflow,
# stream the answer with its sources, and persist the turn for replay.
prompt = st.chat_input("Ask a question about your projects...")
if prompt:
    if st.session_state.rag_graph.vector_store is None:
        # Nothing has been indexed yet, so retrieval would find nothing.
        st.error("Please upload and process documents first!")
    else:
        # Record and echo the user's side of the turn.
        st.session_state.messages.append({"role": "user", "content": prompt})
        with st.chat_message("user"):
            st.markdown(prompt)

        with st.chat_message("assistant"):
            with st.spinner("Analyzing..."):
                # thread_id keys the LangGraph checkpointer so the
                # conversation memory persists across turns.
                run_config = {"configurable": {"thread_id": st.session_state.thread_id}}
                result = st.session_state.rag_graph.workflow.invoke(
                    {"question": prompt}, config=run_config
                )

                answer = result["answer"]
                context = result["context"]

                st.markdown(answer)

                if context:
                    with st.expander("View Sources"):
                        for doc in context:
                            meta = doc.metadata
                            source_name = os.path.basename(meta.get('source', 'Unknown'))
                            # Loader pages are 0-based; show 1-based.
                            page_num = meta.get('page', 0) + 1
                            st.caption(f"π {source_name} (Page {page_num})")
                            st.write(f"_{doc.page_content[:300]}..._")

        # Persist the assistant turn, keeping the retrieved documents so the
        # history replay can show the same citations.
        st.session_state.messages.append({
            "role": "assistant",
            "content": answer,
            "citations": context,
        })