Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -459,7 +459,7 @@ def update_knowledge_base(user_input: str, assistant_response: str):
|
|
| 459 |
full_pipeline = hybrid_chain | RunnableAssign({"validation": validation_chain}) | answer_chain
|
| 460 |
|
| 461 |
|
| 462 |
-
def chat_interface(message, history):
|
| 463 |
inputs = {
|
| 464 |
"query": message,
|
| 465 |
"all_queries": [message],
|
|
@@ -469,10 +469,14 @@ def chat_interface(message, history):
|
|
| 469 |
"vectorstore": vectorstore,
|
| 470 |
"bm25_retriever": bm25_retriever,
|
| 471 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
| 472 |
full_response = ""
|
| 473 |
|
| 474 |
# Stream the response to user
|
| 475 |
-
for chunk in full_pipeline.stream(inputs):
|
| 476 |
if isinstance(chunk, dict) and "answer" in chunk:
|
| 477 |
full_response += chunk["answer"]
|
| 478 |
yield full_response
|
|
|
|
| 459 |
full_pipeline = hybrid_chain | RunnableAssign({"validation": validation_chain}) | answer_chain
|
| 460 |
|
| 461 |
|
| 462 |
+
def chat_interface(message, history):
|
| 463 |
inputs = {
|
| 464 |
"query": message,
|
| 465 |
"all_queries": [message],
|
|
|
|
| 469 |
"vectorstore": vectorstore,
|
| 470 |
"bm25_retriever": bm25_retriever,
|
| 471 |
}
|
| 472 |
+
|
| 473 |
+
hybrid_result = hybrid_chain.invoke(inputs)
|
| 474 |
+
hybrid_result["validation"] = validation_chain.invoke(hybrid_result)
|
| 475 |
+
|
| 476 |
full_response = ""
|
| 477 |
|
| 478 |
# Stream the response to user
|
| 479 |
+
for chunk in answer_chain.stream(hybrid_result):
|
| 480 |
if isinstance(chunk, dict) and "answer" in chunk:
|
| 481 |
full_response += chunk["answer"]
|
| 482 |
yield full_response
|