Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -30,25 +30,46 @@ from langchain.llms import OpenAI
|
|
| 30 |
chain = load_qa_chain(OpenAI(), chain_type="stuff")
|
| 31 |
import gradio as gr
|
| 32 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 33 |
|
| 34 |
-
# Define
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
def process_query(query):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
# Perform similarity search to retrieve relevant documents
|
| 37 |
docs = document_search.similarity_search(query)
|
| 38 |
|
| 39 |
-
# Run
|
| 40 |
-
response = chain.run(input_documents=docs, question=
|
|
|
|
|
|
|
|
|
|
| 41 |
|
| 42 |
# Return the response
|
| 43 |
return response
|
| 44 |
|
| 45 |
-
# Define
|
| 46 |
iface = gr.Interface(
|
| 47 |
-
fn=process_query,
|
| 48 |
-
inputs="text",
|
| 49 |
-
outputs="text",
|
| 50 |
-
title="
|
| 51 |
-
description="
|
| 52 |
)
|
| 53 |
|
| 54 |
# Launch the interface
|
|
|
|
import gradio as gr  # NOTE(review): imports conventionally live at the top of the file

# QA chain using the "stuff" strategy: all retrieved documents are packed
# into a single prompt for the LLM.
chain = load_qa_chain(OpenAI(), chain_type="stuff")

# Persona prompt wrapped around every user turn (runtime string unchanged).
template = """Meet Serene, your youthful and witty personal assistant! At 21 years old, she's full of energy and always eager to help. Serene's goal is to assist you with any questions or problems you might have regarding Schizophrenia. Her enthusiasm shines through in every response, making interactions with her enjoyable and engaging.
{chat_history}
User: {user_message}
Chatbot:"""

# Prompt template exposing the two per-turn variables used above.
prompt = PromptTemplate(
    template=template,
    input_variables=["chat_history", "user_message"],
)

# Conversation memory; memory_key matches the template's {chat_history} slot.
memory = ConversationBufferMemory(memory_key="chat_history")

# Define function to process user query
def process_query(query):
    """Answer a user question via retrieval-augmented QA with chat memory.

    Args:
        query: The user's question as a plain string.

    Returns:
        The chain's text response.
    """
    # Retrieve conversation history. ConversationBufferMemory has no .get();
    # the API is load_memory_variables(), which returns a dict keyed by
    # memory_key ("chat_history" here).
    chat_history = memory.load_memory_variables({}).get("chat_history", "")

    # Apply the prompt template. PromptTemplate has no .render(); the API
    # is .format().
    input_prompt = prompt.format(chat_history=chat_history, user_message=query)

    # Perform similarity search to retrieve relevant documents
    docs = document_search.similarity_search(query)

    # Run question-answering over the retrieved documents and filled prompt
    response = chain.run(input_documents=docs, question=input_prompt)

    # Persist this turn so the next call sees it. BufferMemory has no .add();
    # save_context records both the user input and the model output.
    memory.save_context({"input": query}, {"output": response})

    # Return the response
    return response
|
# Gradio UI: a single text box in, plain text out, wired to process_query.
_iface_kwargs = dict(
    fn=process_query,
    inputs="text",
    outputs="text",
    title="Hey People!",
    description="Hi! How can I assist you?",
)
iface = gr.Interface(**_iface_kwargs)

# Launch the interface