Spaces:
Sleeping
Sleeping
Create components/pdf_chat.py
Browse files- components/pdf_chat.py +35 -25
components/pdf_chat.py
CHANGED
|
@@ -1,37 +1,47 @@
|
|
| 1 |
-
# pdf_chat.py
|
| 2 |
|
| 3 |
import streamlit as st
|
| 4 |
-
import os
|
| 5 |
-
from utils.retriever import load_vectorstore
|
| 6 |
-
from langchain_groq import ChatGroq
|
| 7 |
from langchain_core.messages import AIMessage
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
|
| 9 |
def run_pdf_qa():
|
| 10 |
-
st.
|
| 11 |
|
| 12 |
if "pdf_path" not in st.session_state:
|
| 13 |
-
st.warning("
|
| 14 |
return
|
| 15 |
|
| 16 |
-
|
| 17 |
-
st.markdown(f"**Using file:** `{os.path.basename(st.session_state.pdf_path)}`")
|
| 18 |
|
| 19 |
-
|
|
|
|
| 20 |
|
| 21 |
if question:
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# components/pdf_chat.py
|
| 2 |
|
| 3 |
import streamlit as st
|
|
|
|
|
|
|
|
|
|
| 4 |
from langchain_core.messages import AIMessage
|
| 5 |
+
from utils.retriever import load_vectorstore
|
| 6 |
+
from langchain_core.runnables import RunnablePassthrough
|
| 7 |
+
from langchain_core.output_parsers import StrOutputParser
|
| 8 |
+
from langchain_core.prompts import ChatPromptTemplate
|
| 9 |
+
from langchain_community.chat_models import ChatGroq
|
| 10 |
+
|
| 11 |
|
| 12 |
def run_pdf_qa():
    """Render the "Ask Your PDF" panel: take a user question and answer it
    with a RAG chain (vectorstore retriever -> prompt -> Groq LLM) over the
    PDF previously uploaded into ``st.session_state.pdf_path``.

    Side effects only (Streamlit UI); returns None. Early-returns with a
    warning if no PDF has been uploaded yet.
    """
    # Function-scope imports: ChatGroq lives in the `langchain_groq` package
    # (importing it from langchain_community.chat_models raises ImportError),
    # and `os` is needed for portable basename extraction.
    import os
    from langchain_groq import ChatGroq

    st.header("💬 Ask Your PDF")

    # Guard clause: the upload component must have stored a path first.
    if "pdf_path" not in st.session_state:
        st.warning("⚠️ Please upload a PDF first in the 'Upload Business Docs' section.")
        return

    # os.path.basename handles the OS-specific separator; the previous
    # split('/')[-1] returned the full path on Windows.
    st.markdown(f"**Using file:** `{os.path.basename(st.session_state.pdf_path)}`")

    # User question input
    question = st.text_input("❓ Ask a question based on your uploaded PDF", placeholder="e.g. What is the business model?")

    if question:
        # Load vectorstore retriever for the uploaded document.
        retriever = load_vectorstore(st.session_state.pdf_path)

        # Prompt and model. NOTE(review): Groq model ids are lowercase
        # ("llama3-8b-8192"); the previous "LLaMA3-8b-8192" relied on
        # server-side case-insensitivity.
        prompt = ChatPromptTemplate.from_messages([
            ("system", "You are a helpful assistant. Use the retrieved context to answer the user's question."),
            ("human", "Context:\n{context}\n\nQuestion: {question}")
        ])
        model = ChatGroq(temperature=0.2, model_name="llama3-8b-8192")

        def _format_docs(docs):
            # Join retrieved Document page contents into one plain-text
            # context block. The previous `retriever | RunnablePassthrough()`
            # injected the raw repr of a list[Document] into the prompt.
            return "\n\n".join(doc.page_content for doc in docs)

        rag_chain = (
            {"context": retriever | _format_docs, "question": RunnablePassthrough()}
            | prompt
            | model
            | StrOutputParser()
        )

        with st.spinner("Thinking..."):
            response = rag_chain.invoke(question)

        # Display answer
        st.markdown("### 💡 Answer:")
        st.write(response)