app13
app.py
CHANGED
@@ -104,16 +104,9 @@ qa_chain = ConversationalRetrievalChain.from_llm(
     get_chat_history = lambda h : h
 )
 
-def qa_response(
-
-
-
-    # Get response from the conversational retrieval chain
-    response = qa_chain.invoke(chat_history)
-
-    # Extract and return the assistant's answer from the response
-    assistant_answer = response.get("answer")
-    return assistant_answer
+def qa_response(user_message,chat_history):
+    response = llm_chain.predict(user_message = user_message)
+    return response
 
 chatbot_gradio_app = gr.ChatInterface(fn=qa_response)
 
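For context, the new qa_response matches the (message, history) signature that gr.ChatInterface expects and delegates the reply to a LangChain LLMChain instead of the removed ConversationalRetrievalChain call. Below is a minimal sketch of how it could plug into the rest of app.py; the llm_chain setup (model, prompt) and the launch call are assumptions for illustration and are not part of this commit.

import gradio as gr
from langchain.chains import LLMChain
from langchain.prompts import PromptTemplate
from langchain_openai import ChatOpenAI  # assumed model wrapper, not shown in the diff

# Assumed chain setup: a single-variable prompt keyed on "user_message" so that
# llm_chain.predict(user_message=...) works the way the committed code uses it.
prompt = PromptTemplate(
    input_variables=["user_message"],
    template="Answer the user's question.\n\nQuestion: {user_message}\nAnswer:",
)
llm_chain = LLMChain(llm=ChatOpenAI(), prompt=prompt)

def qa_response(user_message, chat_history):
    # gr.ChatInterface calls fn(message, history); the history is unused here,
    # matching the new code in this commit.
    response = llm_chain.predict(user_message=user_message)
    return response

chatbot_gradio_app = gr.ChatInterface(fn=qa_response)
chatbot_gradio_app.launch()  # assumed launch call, typical for a Gradio Space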