Spaces:
Sleeping
Sleeping
Create components/pdf_chat.py
Browse files- components/pdf_chat.py +34 -19
components/pdf_chat.py
CHANGED
|
@@ -1,27 +1,42 @@
|
|
|
|
|
|
|
|
| 1 |
import streamlit as st
|
| 2 |
-
from utils.session import get_state_value
|
| 3 |
from utils.retriever import load_vectorstore
|
| 4 |
-
from
|
| 5 |
from langchain.chains import RetrievalQA
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
def run_pdf_qa():
|
| 8 |
-
st.
|
| 9 |
-
|
|
|
|
| 10 |
|
| 11 |
-
pdf_path
|
| 12 |
-
|
| 13 |
-
st.warning("Please upload a PDF document in the Upload Docs section first.")
|
| 14 |
return
|
| 15 |
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
llm=ChatGroq(model_name="
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# components/pdf_chat.py
|
| 2 |
+
|
| 3 |
import streamlit as st
|
|
|
|
| 4 |
from utils.retriever import load_vectorstore
|
| 5 |
+
from utils.session import load_from_json
|
| 6 |
from langchain.chains import RetrievalQA
|
| 7 |
+
from langchain_core.messages import AIMessage
|
| 8 |
+
from langchain_community.chat_models import ChatGroq
|
| 9 |
+
from langchain_openai import OpenAIEmbeddings
|
| 10 |
|
def run_pdf_qa():
    """Streamlit page: answer free-form questions about the user's uploaded PDF.

    Reads the stored PDF path from session JSON (written by the
    'Upload Business Docs' page), builds a RetrievalQA chain over the PDF's
    vector store, then renders a question box and the model's answer.
    Warns and returns early when no PDF has been uploaded. Returns None;
    all output is emitted through Streamlit widgets.
    """
    st.subheader("💬 Ask Questions From Your PDF")

    # Path persisted by the upload page; may be missing/empty on first visit.
    pdf_path = load_from_json("pdf_path")

    # Guard clause: require an uploaded .pdf before doing any heavy setup.
    if not pdf_path or not pdf_path.endswith(".pdf"):
        st.warning("📄 Please upload a PDF document first in the 'Upload Business Docs' section.")
        return

    # Build retriever + chain in their own try block so the "Error loading
    # vector store" message is only shown when loading/setup actually failed
    # (previously one broad except also caught inference errors and
    # mislabeled them as vector-store problems).
    try:
        retriever = load_vectorstore(pdf_path)
        llm = ChatGroq(temperature=0.3, model_name="mixtral-8x7b-32768")
        qa_chain = RetrievalQA.from_chain_type(
            llm=llm,
            retriever=retriever.as_retriever(),
            return_source_documents=False,
        )
    except Exception as e:
        st.error(f"❌ Error loading vector store: {str(e)}")
        return

    question = st.text_input("🔍 Ask a question based on your uploaded PDF:")

    if question:
        try:
            with st.spinner("Generating answer..."):
                # RetrievalQA.invoke returns a dict; the answer text is
                # under the "result" key.
                result = qa_chain.invoke({"query": question})
        except Exception as e:
            # Inference/model failures get their own accurate message.
            st.error(f"❌ Error generating answer: {str(e)}")
            return

        if isinstance(result, dict) and "result" in result:
            st.markdown("### ✅ Answer")
            st.write(result["result"])
        else:
            st.error("Unexpected response from the model.")