manabb committed on
Commit
fc56780
·
verified ·
1 Parent(s): 9eb70b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -5
app.py CHANGED
@@ -9,10 +9,16 @@ from huggingface_hub import hf_hub_download
9
  from huggingface_hub import HfApi, login
10
  from datetime import datetime
11
 
 
 
 
 
 
12
  client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
13
  api = HfApi(token=os.getenv("HF_TOKEN"))
14
  repo_id = "manabb/nrl"
15
  file_path_in_repo="LLMLogs.txt"
 
16
  #=====
17
  #Payment type
18
  manual_payment_type="""
@@ -50,7 +56,36 @@ PQC_rules="""
50
  10. PQC should be clarified in tender documents that authenticated documents are required.
51
  11. Adequacy of competition must be evaluated while framing PQC.
52
  12. PQC should balance inclusion of capable vendors and exclusion of incapable ones."""
 
 
 
 
 
 
 
 
 
 
 
 
 
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  #============starting extract_docx_text
55
 
56
  def extract_docx_text(file_path):
@@ -283,9 +318,11 @@ with gr.Blocks(css=css) as demo:
283
  </div>"""
284
  )
285
  with gr.TabItem("ChatBox-NRL maual-goods"):
286
- gr.HTML("""
287
- <div style="color: white; background: black; padding: 20px; text-align: center; font-size: 24px;">
288
- 🚧 Coming Soon 🚧
289
- </div>"""
290
- )
 
 
291
  demo.queue().launch()
 
9
  from huggingface_hub import HfApi, login
10
  from datetime import datetime
11
 
12
+ from manabUtils.py import retrieve_chunks
13
+
14
+ os.environ["HF_TOKEN"] = os.getenv("HF_TOKEN")
15
+ os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
16
+
17
  client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
18
  api = HfApi(token=os.getenv("HF_TOKEN"))
19
  repo_id = "manabb/nrl"
20
  file_path_in_repo="LLMLogs.txt"
21
+ llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)
22
  #=====
23
  #Payment type
24
  manual_payment_type="""
 
56
  10. PQC should be clarified in tender documents that authenticated documents are required.
57
  11. Adequacy of competition must be evaluated while framing PQC.
58
  12. PQC should balance inclusion of capable vendors and exclusion of incapable ones."""
59
#===========================
def create_qa_chain():
    """Build the retrieval-augmented QA chain used by the chat tab.

    Wires the chunk retriever for ``repo_id`` to the shared ``llm`` through
    a stuff-documents chain, so answers are grounded in retrieved context
    and the source documents are returned alongside the answer.
    """
    chunk_retriever = retrieve_chunks(repo_id)
    qa_prompt = ChatPromptTemplate.from_messages(
        [
            (
                "system",
                "You are a helpful assistant. Use the following context to answer the question. "
                "If you don't know the answer, say so.\n\n{context}",
            ),
            ("human", "{input}"),
        ]
    )
    combine_docs = create_stuff_documents_chain(llm, qa_prompt)
    return create_retrieval_chain(chunk_retriever, combine_docs)

# Built once at import time and shared by every chat request.
qa_chain = create_qa_chain()
#=======================
75
def chat(message, history):
    """Answer *message* with the RAG chain and append page references.

    Returns the updated ``history`` plus an empty string, the Gradio chat
    callback convention for clearing the input box.
    """
    result = qa_chain.invoke({"input": message})

    # One "Ref N: Page P" line per retrieved source document; page number
    # comes from the document metadata when present.
    ref_lines = [
        f"Ref {idx + 1}: Page {source.metadata.get('page', 'N/A')}"
        for idx, source in enumerate(result["context"])
    ]

    reply = f"{result['answer']}\n\n**References:**\n" + "\n".join(ref_lines)
    history.append([message, reply])
    return history, ""
89
  #============starting extract_docx_text
90
 
91
  def extract_docx_text(file_path):
 
318
  </div>"""
319
  )
320
  with gr.TabItem("ChatBox-NRL maual-goods"):
321
+ gr.ChatInterface(
322
+ chat,
323
+ title="RAG Chatbot with References",
324
+ description="Ask questions about your docs; see page refs!",
325
+ examples=["what is GeM", "explain compliance rules"],
326
+ cache_examples=False # Disable for dynamic retriever
327
+ )
328
  demo.queue().launch()