JARVISXIRONMAN committed on
Commit
8d75349
Β·
verified Β·
1 Parent(s): 8cd9ba5

Create components/pdf_chat.py

Browse files
Files changed (1) hide show
  1. components/pdf_chat.py +34 -19
components/pdf_chat.py CHANGED
@@ -1,27 +1,42 @@
 
 
1
  import streamlit as st
2
- from utils.session import get_state_value
3
  from utils.retriever import load_vectorstore
4
- from langchain_community.chat_models import ChatGroq
5
  from langchain.chains import RetrievalQA
 
 
 
6
 
7
def run_pdf_qa():
    """Render a Streamlit page that answers questions about the uploaded PDF.

    Reads the stored PDF path from session state, builds a RetrievalQA chain
    over the document's vector store, and answers a single user question.
    Returns None early (after a warning) when no PDF has been uploaded yet;
    all output is written to the Streamlit page.
    """
    st.title("💬 Ask Questions from PDF")
    st.markdown("Chat with your uploaded PDF document using RAG-powered QA system.")

    # Guard clause: a PDF must have been uploaded in a previous step.
    pdf_path = get_state_value("pdf_path", None)
    if not pdf_path:
        st.warning("Please upload a PDF document in the Upload Docs section first.")
        return

    # NOTE(review): assumes load_vectorstore returns a retriever-compatible
    # object; a later revision of this file calls .as_retriever() — confirm
    # which shape load_vectorstore actually returns.
    retriever = load_vectorstore(pdf_path)
    qa_chain = RetrievalQA.from_chain_type(
        llm=ChatGroq(model_name="llama3-8b-8192", temperature=0.3),
        retriever=retriever,
    )

    query = st.text_input("❓ Enter your question about the document:")
    if query:
        with st.spinner("Generating answer..."):
            # Chain.run() is deprecated (removed in langchain 0.3+);
            # invoke() takes a mapping and returns a dict keyed by "result".
            response = qa_chain.invoke({"query": query})
        st.markdown("### ✅ Answer")
        st.markdown(response["result"])
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # components/pdf_chat.py
2
+
3
  import streamlit as st
 
4
  from utils.retriever import load_vectorstore
5
+ from utils.session import load_from_json
6
  from langchain.chains import RetrievalQA
7
+ from langchain_core.messages import AIMessage
8
+ from langchain_community.chat_models import ChatGroq
9
+ from langchain_openai import OpenAIEmbeddings
10
 
11
def run_pdf_qa():
    """Render a Streamlit panel that answers questions about the uploaded PDF.

    Loads the stored PDF path from session JSON, builds a RetrievalQA chain
    over the document's vector store, and answers a single user question.
    Returns None early (after a warning) when no valid PDF path is stored;
    all output is written to the Streamlit page.
    """
    st.subheader("💬 Ask Questions From Your PDF")

    pdf_path = load_from_json("pdf_path")

    # Guard clause: require a previously uploaded .pdf before doing any work.
    if not pdf_path or not pdf_path.endswith(".pdf"):
        st.warning("📄 Please upload a PDF document first in the 'Upload Business Docs' section.")
        return

    # Keep this try limited to setup: previously it wrapped the whole UI flow,
    # so a query-time failure was reported as a vector-store loading error.
    try:
        retriever = load_vectorstore(pdf_path)
        llm = ChatGroq(temperature=0.3, model_name="mixtral-8x7b-32768")
        qa_chain = RetrievalQA.from_chain_type(
            llm=llm,
            retriever=retriever.as_retriever(),
            return_source_documents=False,
        )
    except Exception as e:
        st.error(f"❌ Error loading vector store: {str(e)}")
        return

    question = st.text_input("🔍 Ask a question based on your uploaded PDF:")

    if question:
        with st.spinner("Generating answer..."):
            try:
                # invoke() returns a dict keyed by "result" for RetrievalQA.
                result = qa_chain.invoke({"query": question})
            except Exception as e:
                # Distinct message so query failures are not confused with
                # vector-store loading failures.
                st.error(f"❌ Error generating answer: {str(e)}")
                return

        if isinstance(result, dict) and "result" in result:
            st.markdown("### ✅ Answer")
            st.write(result["result"])
        else:
            st.error("Unexpected response from the model.")