Manglik-R committed on
Commit
60519ed
·
verified ·
1 Parent(s): 1f64dd9

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -23
app.py CHANGED
@@ -1,5 +1,4 @@
1
  import os
2
- import sys
3
  import gradio as gr
4
  from pinecone import Pinecone, ServerlessSpec
5
  from langchain_community.llms import Replicate
@@ -10,13 +9,12 @@ from langchain_huggingface.embeddings import HuggingFaceEmbeddings
10
  from langchain.chains import ConversationalRetrievalChain
11
  import time
12
 
13
- key1 = os.environ.get('REPLICATE_API_TOKEN')
14
- key2 = os.environ.get('PINECONE_API_KEY')
15
- os.environ['REPLICATE_API_TOKEN'] = key1
16
- os.environ["PINECONE_API_KEY"] = key2
17
 
18
  # Initialize Pinecone
19
- pc = Pinecone(api_key=os.environ["PINECONE_API_KEY"])
20
 
21
  # Function to process PDF and set up chatbot
22
  def process_pdf(pdf_doc):
@@ -71,7 +69,7 @@ def process_pdf(pdf_doc):
71
  def query(history, text):
72
  langchain_history = [(msg[1], history[i+1][1] if i+1 < len(history) else "") for i, msg in enumerate(history) if i % 2 == 0]
73
  result = qa_chain({"question": text, "chat_history": langchain_history})
74
- new_history = history + [(text,result['answer'])]
75
  return new_history, ""
76
 
77
  # Define the Gradio interface
@@ -80,22 +78,24 @@ css = """
80
  """
81
 
82
  title = """
83
- <div style="text-align: center;max-width: 700px;">
84
  <h1>Chat with PDF</h1>
 
85
  """
86
 
87
- with gr.Blocks(css=css) as demo:
88
- with gr.Column(elem_id="col-container"):
89
- gr.HTML(title)
90
- with gr.Column():
91
- pdf_doc = gr.File(label="Load a PDF", file_types=['.pdf'], type="filepath")
92
- load_pdf = gr.Button("Load PDF")
93
- langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)
94
- chatbot = gr.Chatbot([], elem_id="chatbot").scale(height=350)
95
- question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ")
96
- submit_btn = gr.Button("Send message")
97
- load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False)
98
- question.submit(query, [chatbot, question], [chatbot, question])
99
- submit_btn.click(query, [chatbot, question], [chatbot, question])
100
-
101
- demo.launch()
 
 
1
  import os
 
2
  import gradio as gr
3
  from pinecone import Pinecone, ServerlessSpec
4
  from langchain_community.llms import Replicate
 
9
  from langchain.chains import ConversationalRetrievalChain
10
  import time
11
 
12
# Retrieve API keys from the environment. Fail fast with a clear message
# if either is missing, instead of letting Pinecone/Replicate raise an
# opaque auth error later (Pinecone(api_key=None) fails confusingly).
replicate_api_token = os.getenv('REPLICATE_API_TOKEN')
pinecone_api_key = os.getenv('PINECONE_API_KEY')
if not pinecone_api_key:
    raise RuntimeError("PINECONE_API_KEY environment variable is not set")
if not replicate_api_token:
    raise RuntimeError("REPLICATE_API_TOKEN environment variable is not set")

# Initialize the Pinecone client with the validated key.
pc = Pinecone(api_key=pinecone_api_key)
18
 
19
  # Function to process PDF and set up chatbot
20
  def process_pdf(pdf_doc):
 
69
def query(history, text):
    """Answer *text* with the QA chain and append the turn to *history*.

    Parameters:
        history: Gradio Chatbot history — a list of (user_message,
            bot_message) tuples.
        text: the new user question.

    Returns:
        (updated_history, "") — the empty string clears the input textbox.
    """
    # ConversationalRetrievalChain expects chat_history as (human, ai)
    # pairs, which is exactly the Chatbot tuple format — pass the pairs
    # through directly. The previous comprehension indexed history as a
    # flat alternating message list (i % 2 == 0), which dropped every
    # other turn and mismatched question/answer pairs.
    langchain_history = [(user_msg, bot_msg) for user_msg, bot_msg in history]
    result = qa_chain({"question": text, "chat_history": langchain_history})
    new_history = history + [(text, result['answer'])]
    return new_history, ""
74
 
75
  # Define the Gradio interface
 
78
  """
79
 
80
  title = """
81
+ <div style="text-align: center; max-width: 700px;">
82
  <h1>Chat with PDF</h1>
83
+ </div>
84
  """
85
 
86
def pdf_changes(pdf_doc):
    """Gradio callback for the "Load PDF" button.

    Thin wrapper around process_pdf so the click handler has a stable
    name; passes the uploaded file path through and returns the status
    string from process_pdf unchanged.
    """
    return process_pdf(pdf_doc)
89
+
90
# Build the UI with gr.Blocks. The gr.Interface version was broken:
# gr.Textbox has no type="key-value" or rows= parameters (it raises at
# construction time), query() consumes/produces Chatbot tuple history
# that a plain Textbox cannot represent, theme="compact" is deprecated,
# and pdf_changes was never wired to any event.
with gr.Blocks(css=css) as demo:
    with gr.Column(elem_id="col-container"):
        gr.HTML(title)
        with gr.Column():
            pdf_doc = gr.File(label="Load a PDF", file_types=['.pdf'], type="filepath")
            load_pdf = gr.Button("Load PDF")
            langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)
        chatbot = gr.Chatbot([], elem_id="chatbot")
        question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter ")
        submit_btn = gr.Button("Send message")

    # Wire events: loading the PDF builds the QA chain; submitting a
    # question appends the answer to the chat history and clears the box.
    load_pdf.click(pdf_changes, inputs=[pdf_doc], outputs=[langchain_status], queue=False)
    question.submit(query, [chatbot, question], [chatbot, question])
    submit_btn.click(query, [chatbot, question], [chatbot, question])

demo.launch()