JARVISXIRONMAN committed on
Commit
1feb3e6
Β·
verified Β·
1 Parent(s): 6d15199

Create components/pdf_chat.py

Browse files
Files changed (1) hide show
  1. components/pdf_chat.py +35 -25
components/pdf_chat.py CHANGED
@@ -1,37 +1,47 @@
1
- # pdf_chat.py
2
 
3
  import streamlit as st
4
- import os
5
- from utils.retriever import load_vectorstore
6
- from langchain_groq import ChatGroq
7
  from langchain_core.messages import AIMessage
 
 
 
 
 
 
8
 
9
def run_pdf_qa():
    """Streamlit page: answer a user's question using chunks retrieved from the uploaded PDF."""
    st.title("💬 Ask Your PDF")

    # Guard clause: a PDF must have been uploaded earlier in this session.
    if "pdf_path" not in st.session_state:
        st.warning("⚠ Please upload a PDF first from the '📤 Upload Business Docs' section.")
        return

    # Show which file the answers will be based on.
    pdf_path = st.session_state.pdf_path
    st.markdown(f"**Using file:** `{os.path.basename(pdf_path)}`")

    question = st.text_input("❓ Ask a question based on your uploaded PDF")
    if not question:
        return

    with st.spinner("🔍 Thinking..."):
        # Retrieve the most relevant chunks and stitch them into one context string.
        retriever = load_vectorstore(pdf_path)
        docs = retriever.get_relevant_documents(question)
        context = "\n\n".join(doc.page_content for doc in docs)

        llm = ChatGroq(model="llama3-8b-8192", temperature=0.3)
        reply = llm.invoke(
            f"""Answer the question based on the context below:\n\n{context}\n\nQuestion: {question}"""
        )

        st.markdown("### 💡 Answer:")
        # ChatGroq normally returns an AIMessage; fall back to str() for anything else.
        answer_text = reply.content if isinstance(reply, AIMessage) else str(reply)
        st.markdown(answer_text)

        # Let the user inspect the retrieved snippets that grounded the answer.
        with st.expander("📄 Source Context"):
            for idx, doc in enumerate(docs):
                st.markdown(f"**Snippet {idx+1}**: {doc.page_content[:500]}...")
 
 
 
 
 
 
 
 
1
+ # components/pdf_chat.py
2
 
3
  import streamlit as st
 
 
 
4
  from langchain_core.messages import AIMessage
5
+ from utils.retriever import load_vectorstore
6
+ from langchain_core.runnables import RunnablePassthrough
7
+ from langchain_core.output_parsers import StrOutputParser
8
+ from langchain_core.prompts import ChatPromptTemplate
9
+ from langchain_community.chat_models import ChatGroq
10
+
11
 
12
  def run_pdf_qa():
13
+ st.header("πŸ’¬ Ask Your PDF")
14
 
15
  if "pdf_path" not in st.session_state:
16
+ st.warning("⚠️ Please upload a PDF first in the 'Upload Business Docs' section.")
17
  return
18
 
19
+ st.markdown(f"**Using file:** `{st.session_state.pdf_path.split('/')[-1]}`")
 
20
 
21
+ # User Question Input
22
+ question = st.text_input("❓ Ask a question based on your uploaded PDF", placeholder="e.g. What is the business model?")
23
 
24
  if question:
25
+ # Load vectorstore retriever
26
+ retriever = load_vectorstore(st.session_state.pdf_path)
27
+
28
+ # Prompt and Model
29
+ prompt = ChatPromptTemplate.from_messages([
30
+ ("system", "You are a helpful assistant. Use the retrieved context to answer the user's question."),
31
+ ("human", "Context:\n{context}\n\nQuestion: {question}")
32
+ ])
33
+ model = ChatGroq(temperature=0.2, model_name="LLaMA3-8b-8192")
34
+
35
+ rag_chain = (
36
+ {"context": retriever | RunnablePassthrough(), "question": RunnablePassthrough()}
37
+ | prompt
38
+ | model
39
+ | StrOutputParser()
40
+ )
41
+
42
+ with st.spinner("Thinking..."):
43
+ response = rag_chain.invoke(question)
44
+
45
+ # Display Answer
46
+ st.markdown("### πŸ’‘ Answer:")
47
+ st.write(response)