HarshaBattula
committed on
Commit
·
c9433ef
1
Parent(s):
1f5af8e
update interface
Browse files
app.py
CHANGED
|
@@ -5,11 +5,6 @@ from retriever import *
|
|
| 5 |
from chain import *
|
| 6 |
import gradio as gr
|
| 7 |
|
| 8 |
-
def chatbot(query):
|
| 9 |
-
llm_response = qa_chain.run({"query": query})
|
| 10 |
-
return llm_response
|
| 11 |
-
|
| 12 |
-
|
| 13 |
def load_embeddings_database_from_disk(persistence_directory, embeddings_generator):
|
| 14 |
"""
|
| 15 |
Load a Chroma vector database from disk.
|
|
@@ -44,8 +39,31 @@ retriever = initialize_document_retriever(topk_documents, vector_database)
|
|
| 44 |
qa_chain = create_question_answering_chain(retriever)
|
| 45 |
|
| 46 |
|
| 47 |
-
|
| 48 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
|
| 50 |
-
|
| 51 |
-
show_progress = True, theme="compact").launch(debug=True)
|
|
|
|
| 5 |
from chain import *
|
| 6 |
import gradio as gr
|
| 7 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 8 |
def load_embeddings_database_from_disk(persistence_directory, embeddings_generator):
|
| 9 |
"""
|
| 10 |
Load a Chroma vector database from disk.
|
|
|
|
| 39 |
qa_chain = create_question_answering_chain(retriever)
|
| 40 |
|
| 41 |
|
| 42 |
+
def add_text(history, text):
    """Record the user's message in the chat history and lock the input box.

    Appends ``(text, None)`` — the bot reply slot is filled later by ``bot`` —
    and clears/disables the textbox until the response arrives.
    """
    extended = [*history, (text, None)]
    return extended, gr.update(value="", interactive=False)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def bot(query):
    """Answer the newest user message via the QA chain.

    ``query`` is the chat history as a list of ``[user, bot]`` pairs; the last
    pair's bot slot is filled in place and the updated history is returned.
    """
    latest_question = query[-1][0]
    answer = qa_chain.run({"query": latest_question})
    query[-1][1] = answer
    return query
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
# Gradio Blocks UI: a chatbot pane plus a single textbox wired so that
# submitting text (1) appends the message and disables the box (add_text),
# (2) runs the QA chain to fill in the reply (bot), and (3) re-enables the box.
with gr.Blocks() as demo:
    # Chat transcript display; starts empty.
    # NOTE(review): elem_id is normally a CSS id — this looks like a title string; confirm intent.
    chatbot = gr.Chatbot([], elem_id="Retrieval Augmented Question Answering").style(height=750)

    with gr.Row():
        with gr.Column(scale=0.95):
            # User input; Enter submits (no visible label).
            txt = gr.Textbox(
                show_label=False,
                placeholder="Enter text and press enter",
            ).style(container=False)

    # On submit: add_text updates history and clears/disables the textbox,
    # then bot fills in the model's answer. queue=False so the UI update is immediate.
    txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(
        bot, chatbot, chatbot
    )
    # After the reply lands, re-enable the textbox for the next question.
    txt_msg.then(lambda: gr.update(interactive=True), None, txt, queue=False)

demo.launch()
|
|
|