manabb committed on
Commit
b20df45
·
verified ·
1 Parent(s): bdce45f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -24
app.py CHANGED
@@ -9,9 +9,11 @@ from huggingface_hub import hf_hub_download
9
  from huggingface_hub import HfApi, login
10
  from datetime import datetime
11
  from langchain_openai import ChatOpenAI
12
- from langchain.chains import create_retrieval_chain
13
- from langchain.chains.combine_documents import create_stuff_documents_chain
14
- from langchain_core.prompts import ChatPromptTemplate
 
 
15
 
16
  from manabUtils import retrieve_chunks
17
 
@@ -63,32 +65,27 @@ PQC_rules="""
63
  #===========================
64
  def create_qa_chain():
65
  retriever = retrieve_chunks(repo_id)
66
- system_prompt = (
67
- "You are a helpful assistant. Use the following context to answer the question. "
68
- "If you don't know the answer, say so.\n\n{context}"
 
 
 
 
 
 
69
  )
70
- prompt = ChatPromptTemplate.from_messages([
71
- ("system", system_prompt),
72
- ("human", "{input}")
73
- ])
74
- doc_chain = create_stuff_documents_chain(llm, prompt)
75
- return create_retrieval_chain(retriever, doc_chain)
76
 
77
  qa_chain = create_qa_chain()
78
  #=======================
79
  def chat(message, history):
80
- response = qa_chain.invoke({"input": message})
81
- answer = response["answer"]
82
-
83
- # Extract references with pages
84
- docs = response["context"]
85
- refs = []
86
- for i, doc in enumerate(docs):
87
- page = doc.metadata.get("page", "N/A")
88
- refs.append(f"Ref {i+1}: Page {page}")
89
-
90
- full_response = f"{answer}\n\n**References:**\n" + "\n".join(refs)
91
- history.append([message, full_response])
92
  return history, ""
93
  #============starting extract_docx_text
94
 
 
9
  from huggingface_hub import HfApi, login
10
  from datetime import datetime
11
  from langchain_openai import ChatOpenAI
12
+
13
+
14
+ from langchain_core.prompts import ChatPromptTemplate, PromptTemplate
15
+ from langchain_core.runnables import RunnablePassthrough
16
+ from langchain_core.output_parsers import StrOutputParser
17
 
18
  from manabUtils import retrieve_chunks
19
 
 
65
  #===========================
66
def create_qa_chain():
    """Build the retrieval-augmented QA pipeline (LCEL composition).

    Flow: retrieve chunks for the incoming question, collapse them into a
    single context string, fill the prompt template, invoke the LLM, and
    parse the reply down to plain text.
    """
    retriever = retrieve_chunks(repo_id)

    # Collapse the retrieved Document objects into one blank-line-separated
    # context string for the prompt.
    def _join_docs(docs):
        return "\n\n".join(doc.page_content for doc in docs)

    prompt = ChatPromptTemplate.from_template(
        "Use context to answer: {context}\n\nQ: {input}"
    )

    qa_pipeline = (
        {
            "context": retriever | _join_docs,
            "input": RunnablePassthrough(),
        }
        | prompt
        | llm
        | StrOutputParser()
    )
    return qa_pipeline
 
 
 
 
 
79
 
80
  qa_chain = create_qa_chain()
81
  #=======================
82
def chat(message, history):
    """Answer *message* with the QA chain and append the exchange to *history*.

    Returns the updated history and an empty string (clears the input box,
    per the usual Gradio chat-callback convention).
    """
    answer = qa_chain.invoke(message)

    # BUG FIX: the original read `retriever`, which is a *local* variable of
    # create_qa_chain() and is never bound at module scope — calling chat()
    # raised NameError. Rebuild the retriever the same way the chain does so
    # the source documents for the reference list can be fetched.
    # NOTE(review): this retrieves twice per message (once inside the chain,
    # once here); unifying the two would need create_qa_chain() to expose its
    # retriever — TODO confirm whether that refactor is worth it.
    docs = retrieve_chunks(repo_id).invoke(message)
    refs = [f"Page {d.metadata.get('page', 'N/A')}" for d in docs]

    full = f"{answer}\n\nRefs: {' | '.join(refs)}"
    history.append([message, full])
    return history, ""
90
  #============starting extract_docx_text
91