Commit b49efba
Parent: 9900574
Update app.py
app.py CHANGED
@@ -17,7 +17,6 @@ import ollama
 import gradio as gr
 from gradio.themes.utils import colors, fonts, sizes
 from gradio.themes import Base
-from gradio.events import EditData
 from huggingface_hub import whoami
 import re
 
@@ -176,7 +175,6 @@ class VectorStoreManager:
         """Find a vector store by its display name"""
         for name, store_info in self.vector_stores.items():
             if store_info["display_name"] == display_name:
-
                 return self.vector_stores[name]["data"]
         return None
 
@@ -201,15 +199,12 @@ vector_store_manager = VectorStoreManager()
 
 
 
-
-
 class DEKCIBChatbot:
     def __init__(self):
         self.initialize()
 
 
     def initialize(self):
-        # self.user_id = self.get_user_data(USER_NAME)
         self.session_manager = SessionManager()
         self.embed_model = OptimumEmbedding(folder_name=EMBED_MODEL_PATH)
         Settings.embed_model = self.embed_model
@@ -218,11 +213,6 @@ class DEKCIBChatbot:
         self.config = self._load_config()
         self.llm_options = self._initialize_models()
 
-        # default_vector_store = self.vector_stores[DEFAULT_VECTOR_STORE]["data"]
-        # self.session_manager.create_session(USER_NAME)
-
-
-
 
 
     def get_user_data(self, user_id):
@@ -347,9 +337,6 @@ class DeKCIBChatEngine:
         self.chat_engine = None
         self.rebuild_chat_engine_flag = True
 
-
-        # self.llm_name = DEFAULT_LLM
-        # self.vector_store_name = DEFAULT_VECTOR_STORE
 
         # Conversation metadata management
         self.convs_metadata = {}
@@ -381,10 +368,7 @@ class DeKCIBChatEngine:
             with open(self.convs_metadata_file_path, "r") as f:
                 self.convs_metadata = json.load(f)
             self.sorted_conversation_list = self.get_sorted_conversation_list()
-
-            # self.build_convs_metadata()
-            # with open(self.convs_metadata_file_path, "w") as f:
-            #     json.dump(self.convs_metadata, f)
+
 
 
 
@@ -452,30 +436,17 @@ class DeKCIBChatEngine:
         )
         self.set_rebuild_chat_engine_flag(True)
 
-
-        self.llm_name = llm_name
-        if self.current_conv_id:
-            self.convs_metadata[self.current_conv_id].update({"llm_name":self.llm_name})
-
         return self.llm
 
     def set_vector_store(self, vector_store_name):
 
         self.vector_store = vector_store_manager.get_vector_store_by_display_name(vector_store_name)
-
+
         if self.vector_store:
             self.initialize_index()
             self.set_rebuild_chat_engine_flag(True)
 
-        self.vector_store_name = vector_store_name
-
-
-        if self.current_conv_id:
-            self.convs_metadata[self.current_conv_id].update({"vector_store_name":self.vector_store_name})
-
         return self.vector_store
-
-
 
     def initialize_index(self):
         """Initialize the index using the current vector store"""
@@ -517,7 +488,7 @@ class DeKCIBChatEngine:
 
 
         print(f"Initializing memory for conversation {conversation_id}")
-
+        print(self.chat_store)
 
         self.memory = ChatMemoryBuffer.from_defaults(
             token_limit=3000,
@@ -546,8 +517,6 @@ class DeKCIBChatEngine:
             system_prompt=SYSTEM_PROMPT
         )
 
-
-
         self.set_rebuild_chat_engine_flag(False)
         return self.chat_engine
 
@@ -570,55 +539,12 @@ class DeKCIBChatEngine:
         return []
 
 
-
-    def get_chat_history_for_ui(self, conversation_id):
-        """Get chat history for a specific conversation"""
-        if conversation_id is None:
-            return []
-        if self.chat_store:
-            conv_data = self.chat_store.to_dict()["store"][conversation_id]
-
-            conv_data_for_ui = []
-            for item in conv_data:
-                if item["role"] == "user":
-                    conv_data_for_ui.append(item)
-                else:
-
-                    content = item["content"]
-
-
-                    time_str = None
-                    if "time" in item["additional_kwargs"]:
-                        elapsed_time = item["additional_kwargs"]["time"]
-                        time_str = f"\n\n[Total time: {elapsed_time:.2f}s]"
-
-                    processed_answer_dict = process_text_with_think_tags(content)
-
-                    if processed_answer_dict["has_two_parts"]:
-                        think_content = processed_answer_dict["think_part"]
-                        conv_data_for_ui.append({"role": "assistant", "content": think_content, "metadata":{"title":"Thinking...", "status":"done"}})
-                        remaining_text = processed_answer_dict["regular_part"]
-                        if time_str:
-                            remaining_text += time_str
-                        conv_data_for_ui.append({"role": "assistant", "content": remaining_text})
-                    else:
-                        item_copy = copy.deepcopy(item)
-                        if time_str:
-                            item_copy["content"] += time_str
-                        conv_data_for_ui.append(item_copy)
-            return conv_data_for_ui
-
-        return []
-
-
-
-
     def set_rebuild_chat_engine_flag(self, flag):
         self.rebuild_chat_engine_flag = flag
 
     def chat(self, message, conversation_id=None):
 
-
+
         create_flag = False
         if conversation_id is None:
             conversation_id = self.create_conversation(message=message)
@@ -635,27 +561,31 @@ class DeKCIBChatEngine:
         self.rebuild_chat_engine_flag = False
 
 
+        print("user message")
+        # user_msg = ChatMessage(role=MessageRole.USER, content=message)
+        # self.add_message(conversation_id, user_msg)
+
+
+        print("L597")
+        print(message)
         # Get response
         response = self.chat_engine.chat(message)
 
-
-
-        elapsed_time = time.time() - start_time
-
+        answer = response.response
 
+        print(answer)
+        print(type(answer))
+        print("assistant message")
+        # assistant_msg = ChatMessage(role=MessageRole.ASSISTANT, content=answer)
+        # self.add_message(conversation_id, assistant_msg)
 
-        answer_dict = self.chat_store.get_messages(conversation_id)[-1].dict()
-        answer_dict['additional_kwargs'].update({"time":elapsed_time})
-        new_msg = ChatMessage.model_validate(answer_dict)
-        self.chat_store.delete_message(conversation_id, -1)
-        self.chat_store.add_message(conversation_id, new_msg)
 
         self.update_convs_metadata(conversation_id, create_flag=create_flag)
+        print("update_convs_metadata")
         self.save_metadata()
+        print("save_metadata")
         self.save_chat_history()
+        print("save_chat_history")
 
         return response
 
@@ -744,7 +674,34 @@ class DeKCIBChatEngine:
         """Get conversation metadata"""
         return self.convs_metadata.get(conv_id, {})
 
-
+    def switch_conversation(self, conv_id):
+        """
+        Switch to an existing conversation
+        Args:
+            conv_id: Conversation ID to switch to
+        Returns:
+            True if successful, False otherwise
+        """
+        if conv_id not in self.convs_metadata:
+            return False
+
+        # Set as current conversation
+        self.current_conv_id = conv_id
+
+        # Get the conversation's LLM and vector store
+        metadata = self.convs_metadata[conv_id]
+
+        # Switch to the conversation's resources if they're different
+        if metadata.get("llm") and metadata["llm"] != self.llm_name:
+            self.set_llm(metadata["llm"])
+
+        if metadata.get("vector_store") and metadata["vector_store"] != self.vector_store_name:
+            self.set_vector_store(metadata["vector_store"])
+
+        # Rebuild chat engine with this conversation ID
+        self.build_chat_engine(conv_id)
+
+        return True
 
     def save_metadata(self):
         """Save conversation metadata to file"""
@@ -765,56 +722,46 @@ class DeKCIBChatEngine:
         except Exception as e:
             print(f"Error loading metadata: {e}")
 
-
-
-
-        if
-
-
-
-
-
-
-
-        self.update_convs_metadata(conversation_id)
-        self.save_metadata()
-        self.save_chat_history()
-
-
-    def retry_message(self, conversation_id):
-        if conversation_id is not None:
-            self.undo_message(conversation_id)
-            self.update_convs_metadata(conversation_id)
-            self.save_metadata()
-            self.save_chat_history()
-
-
-
-    def undo_message(self, conversation_id):
-        if conversation_id is not None:
-            msg_list = self.chat_store.get_messages(conversation_id)
-
-
-            if msg_list[-1].role == MessageRole.ASSISTANT and len(msg_list) > 0:
-                self.chat_store.delete_last_message(conversation_id)
-            if msg_list[-1].role == MessageRole.USER and len(msg_list) > 0:
-                self.chat_store.delete_last_message(conversation_id)
-
-
-
-
-
-
-
-
-
-
-
-
-        self.
-
-
-
-
+    def get_or_build_chat_engine(self, conversation_id=None, llm_name=None, vector_store_name=None):
+        """
+        Check if the chat engine needs to be rebuilt based on changes to LLM, vector store or conversation ID.
+        Only rebuilds the chat engine if necessary to avoid performance overhead.
+
+        Args:
+            conversation_id: The conversation ID to use
+            llm_name: The LLM model name to use
+            vector_store_name: The vector store name to use
+            system_prompt: Custom system prompt (optional)
+
+        Returns:
+            The existing or newly built chat engine
+        """
+        rebuild_needed = False
+
+        # Check if conversation ID changed
+        if conversation_id is not None and self.conv_id != conversation_id:
+            print(f"Building chat engine: Conversation ID changed from {self.conv_id} to {conversation_id}")
+            self.conv_id = conversation_id
+            rebuild_needed = True
+
+        # Check if LLM changed
+        if llm_name is not None and self.llm_name != llm_name:
+            print(f"Building chat engine: LLM changed from {self.llm_name} to {llm_name}")
+            self.set_llm(llm_name)
+            rebuild_needed = True
+
+        # Check if vector store changed
+        if vector_store_name is not None and self.vector_store_name != vector_store_name:
+            print(f"Building chat engine: Vector store changed from {self.vector_store_name} to {vector_store_name}")
+            self.set_vector_store(vector_store_name)
+            rebuild_needed = True
+
+        # Rebuild only if needed
+        if rebuild_needed:
+            return self.build_chat_engine(conversation_id)
+        else:
+            print("Using existing chat engine: No changes detected")
+            return self.chat_engine
 
 
 
@@ -850,6 +797,23 @@ class ChatbotUI:
     def init_attr(self):
         self.llm_options = self.dekcib_chatbot.llm_options
         self.vector_stores = self.dekcib_chatbot.vector_stores
+        # self.vector_stores_options = [(v["display_name"], k) for k, v in self.dekcib_chatbot.vector_stores.items()]
+
+
+        # self.init_conversations_history()
+
+
+        # def init_conversations_history(self):
+        #     chat_session = self.dekcib_chatbot.session_manager.sessions[USER_NAME]
+        #     self.init_convs_list = chat_session.get_sorted_conversation_list_for_ui()
+        #     if len(self.init_convs_list) > 0:
+        #         self.init_chat_history = chat_session.get_chat_history(chat_session.sorted_conversation_list[0])
+        #         self.init_convs_index = 0
+        #     else:
+        #         self.init_chat_history = []
+        #         self.init_convs_index = None
+
+
 
 
 
@@ -882,7 +846,7 @@ class ChatbotUI:
             with gr.Column(scale=3):
                 vector_dropdown = gr.Dropdown(
                     label="Injury Biomechanics Knowledge Base",
-                    choices=[(v["display_name"]) for k, v in self.vector_stores.items()],
+                    choices=[(v["display_name"], k) for k, v in self.vector_stores.items()],
                     value=next(iter(self.vector_stores.keys()), None)
 
                 )
@@ -909,7 +873,7 @@ class ChatbotUI:
                     type="index",
                     layout="table"
                 )
-
+
 
                 # Main chat area
                 with gr.Column(scale=3):
|
|
| 945 |
|
| 946 |
|
| 947 |
|
|
|
|
| 948 |
def chat_with_dekcib(history, user_id, conv_idx):
|
| 949 |
|
| 950 |
-
start_time = time.time()
|
| 951 |
|
| 952 |
msg = history[-1]["content"]
|
| 953 |
|
| 954 |
user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
|
| 955 |
# user_engine.che
|
| 956 |
|
| 957 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 958 |
if conv_idx is not None:
|
| 959 |
conv_id = user_engine.sorted_conversation_list[conv_idx]
|
| 960 |
-
|
|
|
|
| 961 |
conv_id = None
|
| 962 |
|
| 963 |
-
# if len(history) == 1 and conv_idx is None:
|
| 964 |
-
# conv_id = None
|
| 965 |
|
| 966 |
|
| 967 |
response = user_engine.chat(msg, conv_id)
|
| 968 |
answer = response.response
|
| 969 |
|
| 970 |
processed_answer_dict = process_text_with_think_tags(answer)
|
| 971 |
-
|
| 972 |
-
|
| 973 |
if processed_answer_dict["has_two_parts"]:
|
| 974 |
think_content = processed_answer_dict["think_part"]
|
| 975 |
remaining_text = processed_answer_dict["regular_part"]
|
| 976 |
|
| 977 |
|
| 978 |
-
|
| 979 |
-
history.append({"role": "assistant", "content": "", "metadata":{"title":"Thinking...", "status":"pending"}})
|
| 980 |
-
# history.append(thick_msg)
|
| 981 |
for character in think_content:
|
| 982 |
-
history[-1]["
|
| 983 |
yield history
|
| 984 |
|
| 985 |
-
|
| 986 |
-
elapsed_time = time.time() - start_time
|
| 987 |
-
history[-1]["metadata"]["title"] = f"Thinking... [Thinking time: {elapsed_time:.2f}s]"
|
| 988 |
-
history[-1]["metadata"]["status"] = "done"
|
| 989 |
-
yield history
|
| 990 |
-
|
| 991 |
-
# Start response time measurement
|
| 992 |
-
|
| 993 |
history.append({"role": "assistant", "content": ""})
|
| 994 |
for character in remaining_text:
|
| 995 |
history[-1]["content"] += character
|
| 996 |
yield history
|
| 997 |
-
|
| 998 |
-
elapsed_time = time.time() - start_time
|
| 999 |
-
history[-1]["content"] += f"\n\n[Total time: {elapsed_time:.2f}s]"
|
| 1000 |
-
yield history
|
| 1001 |
-
|
| 1002 |
-
|
| 1003 |
else:
|
| 1004 |
full_text = processed_answer_dict["full_text"]
|
| 1005 |
-
|
| 1006 |
for character in full_text:
|
| 1007 |
history[-1]["content"] += character
|
| 1008 |
yield history
|
| 1009 |
-
|
| 1010 |
-
|
| 1011 |
-
|
| 1012 |
-
yield history
|
| 1013 |
|
| 1014 |
|
| 1015 |
def clear_msg():
|
|
|
|
| 1016 |
return ""
|
| 1017 |
|
| 1018 |
|
|
@@ -1049,32 +1001,47 @@ class ChatbotUI:
 
 
            def click_to_select_conversation(conversation_history, user_id):
-
-
                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
                user_engine.set_current_conv_id(conversation_history, type="index")
 
+                chat_history = user_engine.get_chat_history(user_engine.current_conv_id)
 
-
-
-                llm_name = user_engine.convs_metadata[user_engine.current_conv_id]["llm_name"]
-                vector_store_name = user_engine.convs_metadata[user_engine.current_conv_id]["vector_store_name"]
-
-                return gr.update(value=conversation_history), chat_history, gr.update(value=llm_name), gr.update(value=vector_store_name)
-
-
+                return gr.update(value=conversation_history), chat_history
 
 
-
-
-
            conversation_history.click(
                click_to_select_conversation,
                [conversation_history, user_id],
-                [conversation_history, chatbot
+                [conversation_history, chatbot]
            )
 
 
+            # msg.submit(
+            #     chat_with_dekcib,
+            #     [msg, chatbot, user_id_dropdown],
+            #     [chatbot]
+            # )
+
+
+
+            # msg.submit(
+            #     clear_msg,
+            #     None,
+            #     [msg]
+            # ).then(
+            #     chat_with_dekcib,
+            #     [msg, chatbot, user_id_dropdown],
+            #     [chatbot]
+            # )
+
+
+            # clear_btn.click(
+            #     clear_session,
+            #     [session_state],
+            #     [chatbot, session_state],
+            #     queue=False
+            # )
+
 
            def create_session(user_id):
                if user_id is None:
@@ -1084,11 +1051,21 @@ class ChatbotUI:
                self.dekcib_chatbot.session_manager.create_session(user_id)
                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
 
+                llm_name = user_engine.llm_name
+                vector_store_name = user_engine.vector_store_name
+                # chat_store = user_engine.chat_store
 
+                # convs = user_engine.convs
+                # history = user_engine.history
 
                sorted_conversation_list = user_engine.get_sorted_conversation_list_for_ui()
-
-
+                print("sorted_conversation_list")
+                print(sorted_conversation_list)
+                # sorted_conversation_list = [
+                #     ["I think therefore I am."],
+                #     ["The unexamined life is not worth living."],
+                #     ["Test Item"]
+                # ]
 
                if len(sorted_conversation_list) > 0:
                    index = 0
@@ -1097,10 +1074,8 @@ class ChatbotUI:
                update_conversation_history = gr.update(samples=sorted_conversation_list, value=index)
 
                user_engine.set_current_conv_id(0, type="index")
-                chat_history = user_engine.
+                chat_history = user_engine.get_chat_history(user_engine.current_conv_id)
 
-                llm_name = user_engine.convs_metadata[user_engine.current_conv_id]["llm_name"]
-                vector_store_name = user_engine.convs_metadata[user_engine.current_conv_id]["vector_store_name"]
 
                yield llm_name, vector_store_name, update_conversation_history, chat_history
 
@@ -1124,7 +1099,6 @@ class ChatbotUI:
                user_engine.set_llm(llm_name)
 
 
-
            llm_dropdown.change(
                update_llm,
                [user_id, llm_dropdown],
@@ -1146,119 +1120,8 @@ class ChatbotUI:
                [user_id, vector_dropdown],
                None
            )
-
-
-            def edit_chat(user_id, history, edit_data: EditData):
-                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
-                idx = edit_data.index
-
-                # Count how many user messages appear up to this index in the UI history
-                user_message_count = 0
-                for i in range(idx + 1):
-                    if history[i]["role"] == "user":
-                        user_message_count += 1
-
-                # In backend storage, user messages are at positions 0, 2, 4, 6...
-                # So the backend index is (user_message_count - 1) * 2
-                backend_idx = (user_message_count - 1) * 2
-
-                user_engine.edit_message(backend_idx, user_engine.current_conv_id)
-                history = history[: idx+1]
-                return history
 
-
-            chatbot.edit(
-                edit_chat,
-                [user_id, chatbot],
-                [chatbot]
-            ).success(
-                chat_with_dekcib,
-                [chatbot, user_id, conversation_history],
-                [chatbot]
-            ).success(
-                update_conversation_history,
-                [user_id],
-                [conversation_history]
-            )
-
-
-
-            def retry_chat(user_id, history):
-                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
-                user_engine.retry_message(user_engine.current_conv_id)
-
-
-                while history[-1]["role"] == "assistant":
-                    history.pop()
-                    yield history
-
-
-                return history
-
-
-
-
-
-            chatbot.retry(
-                retry_chat,
-                [user_id, chatbot],
-                [chatbot]
-            ).then(
-                chat_with_dekcib,
-                [chatbot, user_id, conversation_history],
-                [chatbot]
-            ).then(
-                update_conversation_history,
-                [user_id],
-                [conversation_history]
-            )
-
-
-            def undo_chat(user_id):
-                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
-                user_engine.undo_message(user_engine.current_conv_id)
-
-                chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
-
-                return chat_history
-
-            chatbot.undo(
-                undo_chat,
-                [user_id],
-                [chatbot]
-            )
 
-
-
-            def clear_conversation(user_id):
-                user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
-                user_engine.delete_conversation(user_engine.current_conv_id)
-
-                sorted_conversation_list = user_engine.get_sorted_conversation_list_for_ui()
-
-                if len(sorted_conversation_list) > 0:
-                    index = 0
-                else:
-                    index = None
-                update_conversation_history = gr.update(samples=sorted_conversation_list, value=index)
-
-
-
-                user_engine.set_current_conv_id(index, type="index")
-                chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
-
-                yield update_conversation_history, chat_history
-
-
-
-            chatbot.clear(
-                clear_conversation,
-                [user_id],
-                [conversation_history, chatbot]
-            )
-
-
-
 
            # Create new conversation button should only clear the chat area, but not create a new conversation yet
            def prepare_new_chat():
@@ -1267,6 +1130,7 @@ class ChatbotUI:
                return [], gr.update(value=None)
 
            def print_dataset(value):
+                print("value")
                print(value)
 
            # Create new conversation
@@ -1286,11 +1150,17 @@ class ChatbotUI:
 
 # Deployment settings
 if __name__ == "__main__":
-
+    # Check chat store health
+    # store_health_ok = check_chat_store_health()
+    # if not store_health_ok:
+    #     print("WARNING: Chat store health check failed! Some functionality may not work correctly.")
+
+    # # Run warm-up to pre-initialize resources
+    # warm_up_resources()
 
     dekcib_chatbot = DEKCIBChatbot()
-
     ui = ChatbotUI(dekcib_chatbot)
     demo = ui.create_ui()
     demo.queue(max_size=10, default_concurrency_limit=3)
     demo.launch(allowed_paths=["logo.png"])
+
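Note on the dropdown fix in the @@ -882,7 hunk: the old line built a list of bare display names, so handlers received the display string rather than the store key. Gradio's Dropdown also accepts (label, value) tuples, which is what the new line uses: the UI shows the display name while callbacks get the dictionary key. A minimal sketch of the difference (this vector_stores dict is illustrative, not the one defined in app.py):

import gradio as gr

vector_stores = {"kb_v1": {"display_name": "Injury Biomechanics KB v1"}}

vector_dropdown = gr.Dropdown(
    # (label, value) tuples: display the name, return the key to callbacks
    choices=[(v["display_name"], k) for k, v in vector_stores.items()],
    value=next(iter(vector_stores.keys()), None),
)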
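The get_or_build_chat_engine method added in the @@ -765,56 hunk follows a memoize-by-inputs pattern: record the inputs the engine was last built from and rebuild only when one of them changes. The same pattern in a standalone, runnable sketch (EngineCache and build_engine are illustrative names, not part of app.py):

class EngineCache:
    def __init__(self):
        self.conv_id = None
        self.llm_name = None
        self.engine = None

    def build_engine(self):
        # stand-in for the expensive chat-engine construction
        return ("engine", self.conv_id, self.llm_name)

    def get_or_build(self, conv_id=None, llm_name=None):
        # rebuild only if an input changed (or nothing was built yet)
        rebuild_needed = self.engine is None
        if conv_id is not None and conv_id != self.conv_id:
            self.conv_id = conv_id
            rebuild_needed = True
        if llm_name is not None and llm_name != self.llm_name:
            self.llm_name = llm_name
            rebuild_needed = True
        if rebuild_needed:
            self.engine = self.build_engine()
        return self.engine

cache = EngineCache()
first = cache.get_or_build(conv_id="c1", llm_name="qwen")
assert cache.get_or_build(conv_id="c1", llm_name="qwen") is first  # cached
assert cache.get_or_build(llm_name="other") is not first           # rebuilt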