InnovisionLLC committed on
Commit
b424eed
·
1 Parent(s): b49efba

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +323 -113
app.py CHANGED
@@ -436,16 +436,27 @@ class DeKCIBChatEngine:
436
  )
437
  self.set_rebuild_chat_engine_flag(True)
438
 
 
 
 
 
 
439
  return self.llm
440
 
441
  def set_vector_store(self, vector_store_name):
442
 
443
  self.vector_store = vector_store_manager.get_vector_store_by_display_name(vector_store_name)
444
-
445
  if self.vector_store:
446
  self.initialize_index()
447
  self.set_rebuild_chat_engine_flag(True)
448
 
 
 
 
 
 
 
449
  return self.vector_store
450
 
451
  def initialize_index(self):
@@ -537,6 +548,46 @@ class DeKCIBChatEngine:
537
  if self.chat_store:
538
  return self.chat_store.to_dict()["store"][conversation_id]
539
  return []
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
540
 
541
 
542
  def set_rebuild_chat_engine_flag(self, flag):
@@ -544,7 +595,7 @@ class DeKCIBChatEngine:
544
 
545
  def chat(self, message, conversation_id=None):
546
 
547
-
548
  create_flag = False
549
  if conversation_id is None:
550
  conversation_id = self.create_conversation(message=message)
@@ -561,31 +612,27 @@ class DeKCIBChatEngine:
561
  self.rebuild_chat_engine_flag = False
562
 
563
 
564
- print("user message")
565
- # user_msg = ChatMessage(role=MessageRole.USER, content=message)
566
- # self.add_message(conversation_id, user_msg)
567
-
568
-
569
- print("L597")
570
- print(message)
571
  # Get response
572
  response = self.chat_engine.chat(message)
573
 
574
- answer = response.response
575
 
576
- print(answer)
577
- print(type(answer))
578
- print("assistant message")
579
- # assistant_msg = ChatMessage(role=MessageRole.ASSISTANT, content=answer)
580
- # self.add_message(conversation_id, assistant_msg)
581
 
582
 
 
 
 
 
 
 
 
583
  self.update_convs_metadata(conversation_id, create_flag=create_flag)
584
- print("update_convs_metadata")
585
  self.save_metadata()
586
- print("save_metadata")
587
  self.save_chat_history()
588
- print("save_chat_history")
589
 
590
  return response
591
 
@@ -674,34 +721,7 @@ class DeKCIBChatEngine:
674
  """Get conversation metadata"""
675
  return self.convs_metadata.get(conv_id, {})
676
 
677
- def switch_conversation(self, conv_id):
678
- """
679
- Switch to an existing conversation
680
- Args:
681
- conv_id: Conversation ID to switch to
682
- Returns:
683
- True if successful, False otherwise
684
- """
685
- if conv_id not in self.convs_metadata:
686
- return False
687
-
688
- # Set as current conversation
689
- self.current_conv_id = conv_id
690
-
691
- # Get the conversation's LLM and vector store
692
- metadata = self.convs_metadata[conv_id]
693
-
694
- # Switch to the conversation's resources if they're different
695
- if metadata.get("llm") and metadata["llm"] != self.llm_name:
696
- self.set_llm(metadata["llm"])
697
-
698
- if metadata.get("vector_store") and metadata["vector_store"] != self.vector_store_name:
699
- self.set_vector_store(metadata["vector_store"])
700
-
701
- # Rebuild chat engine with this conversation ID
702
- self.build_chat_engine(conv_id)
703
-
704
- return True
705
 
706
  def save_metadata(self):
707
  """Save conversation metadata to file"""
@@ -722,47 +742,54 @@ class DeKCIBChatEngine:
722
  except Exception as e:
723
  print(f"Error loading metadata: {e}")
724
 
725
- def get_or_build_chat_engine(self, conversation_id=None, llm_name=None, vector_store_name=None):
726
- """
727
- Check if the chat engine needs to be rebuilt based on changes to LLM, vector store or conversation ID.
728
- Only rebuilds the chat engine if necessary to avoid performance overhead.
729
-
730
- Args:
731
- conversation_id: The conversation ID to use
732
- llm_name: The LLM model name to use
733
- vector_store_name: The vector store name to use
734
- system_prompt: Custom system prompt (optional)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
735
 
736
- Returns:
737
- The existing or newly built chat engine
738
- """
739
- rebuild_needed = False
740
-
741
- # Check if conversation ID changed
742
- if conversation_id is not None and self.conv_id != conversation_id:
743
- print(f"Building chat engine: Conversation ID changed from {self.conv_id} to {conversation_id}")
744
- self.conv_id = conversation_id
745
- rebuild_needed = True
746
-
747
- # Check if LLM changed
748
- if llm_name is not None and self.llm_name != llm_name:
749
- print(f"Building chat engine: LLM changed from {self.llm_name} to {llm_name}")
750
- self.set_llm(llm_name)
751
- rebuild_needed = True
752
-
753
- # Check if vector store changed
754
- if vector_store_name is not None and self.vector_store_name != vector_store_name:
755
- print(f"Building chat engine: Vector store changed from {self.vector_store_name} to {vector_store_name}")
756
- self.set_vector_store(vector_store_name)
757
- rebuild_needed = True
758
-
759
- # Rebuild only if needed
760
- if rebuild_needed:
761
- return self.build_chat_engine(conversation_id)
762
- else:
763
- print("Using existing chat engine: No changes detected")
764
- return self.chat_engine
765
 
 
 
 
 
 
 
 
 
 
 
 
 
766
 
767
 
768
 
@@ -846,7 +873,7 @@ class ChatbotUI:
846
  with gr.Column(scale=3):
847
  vector_dropdown = gr.Dropdown(
848
  label="Injury Biomechanics Knowledge Base",
849
- choices=[(v["display_name"], k) for k, v in self.vector_stores.items()],
850
  value=next(iter(self.vector_stores.keys()), None)
851
 
852
  )
@@ -909,62 +936,74 @@ class ChatbotUI:
909
 
910
 
911
 
912
-
913
  def chat_with_dekcib(history, user_id, conv_idx):
914
 
 
915
 
916
  msg = history[-1]["content"]
917
 
918
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
919
  # user_engine.che
920
 
921
-
922
- print("conv_idx")
923
- print(conv_idx)
924
-
925
-
926
- conv_id = None
927
  if conv_idx is not None:
928
  conv_id = user_engine.sorted_conversation_list[conv_idx]
929
-
930
- if len(history) == 1:
931
  conv_id = None
932
 
 
 
933
 
934
 
935
  response = user_engine.chat(msg, conv_id)
936
  answer = response.response
937
 
938
  processed_answer_dict = process_text_with_think_tags(answer)
939
-
 
940
  if processed_answer_dict["has_two_parts"]:
941
  think_content = processed_answer_dict["think_part"]
942
  remaining_text = processed_answer_dict["regular_part"]
943
 
944
 
945
- history.append({"role": "assistant", "content": "", "metadata":{"Thinking...": ""}})
 
 
946
  for character in think_content:
947
- history[-1]["metadata"]["Thinking..."] += character
948
  yield history
949
 
 
 
 
 
 
 
 
 
950
  history.append({"role": "assistant", "content": ""})
951
  for character in remaining_text:
952
  history[-1]["content"] += character
953
  yield history
954
-
 
 
 
 
 
955
  else:
956
  full_text = processed_answer_dict["full_text"]
957
-
958
  for character in full_text:
959
  history[-1]["content"] += character
960
  yield history
961
-
962
-
963
-
 
964
 
965
 
966
  def clear_msg():
967
- print("clear_msg")
968
  return ""
969
 
970
 
@@ -1001,18 +1040,29 @@ class ChatbotUI:
1001
 
1002
 
1003
  def click_to_select_conversation(conversation_history, user_id):
 
 
1004
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1005
  user_engine.set_current_conv_id(conversation_history, type="index")
1006
 
1007
- chat_history = user_engine.get_chat_history(user_engine.current_conv_id)
1008
 
1009
- return gr.update(value=conversation_history), chat_history
 
 
 
 
 
 
 
1010
 
1011
 
 
 
 
1012
  conversation_history.click(
1013
  click_to_select_conversation,
1014
  [conversation_history, user_id],
1015
- [conversation_history, chatbot]
1016
  )
1017
 
1018
 
@@ -1051,16 +1101,14 @@ class ChatbotUI:
1051
  self.dekcib_chatbot.session_manager.create_session(user_id)
1052
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1053
 
1054
- llm_name = user_engine.llm_name
1055
- vector_store_name = user_engine.vector_store_name
1056
  # chat_store = user_engine.chat_store
1057
 
1058
  # convs = user_engine.convs
1059
  # history = user_engine.history
1060
 
1061
  sorted_conversation_list = user_engine.get_sorted_conversation_list_for_ui()
1062
- print("sorted_conversation_list")
1063
- print(sorted_conversation_list)
1064
  # sorted_conversation_list = [
1065
  # ["I think therefore I am."],
1066
  # ["The unexamined life is not worth living."],
@@ -1074,8 +1122,10 @@ class ChatbotUI:
1074
  update_conversation_history = gr.update(samples=sorted_conversation_list, value=index)
1075
 
1076
  user_engine.set_current_conv_id(0, type="index")
1077
- chat_history = user_engine.get_chat_history(user_engine.current_conv_id)
1078
 
 
 
1079
 
1080
  yield llm_name, vector_store_name, update_conversation_history, chat_history
1081
 
@@ -1099,6 +1149,7 @@ class ChatbotUI:
1099
  user_engine.set_llm(llm_name)
1100
 
1101
 
 
1102
  llm_dropdown.change(
1103
  update_llm,
1104
  [user_id, llm_dropdown],
@@ -1120,8 +1171,168 @@ class ChatbotUI:
1120
  [user_id, vector_dropdown],
1121
  None
1122
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1123
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1124
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1125
 
1126
  # Create new conversation button should only clear the chat area, but not create a new conversation yet
1127
  def prepare_new_chat():
@@ -1130,7 +1341,6 @@ class ChatbotUI:
1130
  return [], gr.update(value=None)
1131
 
1132
  def print_dataset(value):
1133
- print("value")
1134
  print(value)
1135
 
1136
  # Create new conversation
 
436
  )
437
  self.set_rebuild_chat_engine_flag(True)
438
 
439
+
440
+ self.llm_name = llm_name
441
+ if self.current_conv_id:
442
+ self.convs_metadata[self.current_conv_id].update({"llm_name":self.llm_name})
443
+
444
  return self.llm
445
 
446
  def set_vector_store(self, vector_store_name):
447
 
448
  self.vector_store = vector_store_manager.get_vector_store_by_display_name(vector_store_name)
449
+
450
  if self.vector_store:
451
  self.initialize_index()
452
  self.set_rebuild_chat_engine_flag(True)
453
 
454
+ self.vector_store_name = vector_store_name
455
+
456
+
457
+ if self.current_conv_id:
458
+ self.convs_metadata[self.current_conv_id].update({"vector_store_name":self.vector_store_name})
459
+
460
  return self.vector_store
461
 
462
  def initialize_index(self):
 
548
  if self.chat_store:
549
  return self.chat_store.to_dict()["store"][conversation_id]
550
  return []
551
+
552
+
553
+ def get_chat_history_for_ui(self, conversation_id):
554
+ """Get chat history for a specific conversation"""
555
+ if conversation_id is None:
556
+ return []
557
+ if self.chat_store:
558
+ conv_data = self.chat_store.to_dict()["store"][conversation_id]
559
+
560
+ conv_data_for_ui = []
561
+ for item in conv_data:
562
+ if item["role"] == "user":
563
+ conv_data_for_ui.append(item)
564
+ else:
565
+
566
+ content = item["content"]
567
+
568
+
569
+ time_str = None
570
+ if "time" in item["additional_kwargs"]:
571
+ elapsed_time = item["additional_kwargs"]["time"]
572
+ time_str = f"\n\n[Total time: {elapsed_time:.2f}s]"
573
+
574
+ processed_answer_dict = process_text_with_think_tags(content)
575
+
576
+ if processed_answer_dict["has_two_parts"]:
577
+ think_content = processed_answer_dict["think_part"]
578
+ conv_data_for_ui.append({"role": "assistant", "content": think_content, "metadata":{"title":"Thinking...", "status":"done"}})
579
+ remaining_text = processed_answer_dict["regular_part"]
580
+ if time_str:
581
+ remaining_text += time_str
582
+ conv_data_for_ui.append({"role": "assistant", "content": remaining_text})
583
+ else:
584
+ item_copy = copy.deepcopy(item)
585
+ if time_str:
586
+ item_copy["content"] += time_str
587
+ conv_data_for_ui.append(item_copy)
588
+ return conv_data_for_ui
589
+
590
+ return []
591
 
592
 
593
  def set_rebuild_chat_engine_flag(self, flag):
 
595
 
596
  def chat(self, message, conversation_id=None):
597
 
598
+ start_time = time.time()
599
  create_flag = False
600
  if conversation_id is None:
601
  conversation_id = self.create_conversation(message=message)
 
612
  self.rebuild_chat_engine_flag = False
613
 
614
 
 
 
 
 
 
 
 
615
  # Get response
616
  response = self.chat_engine.chat(message)
617
 
618
+ # answer = response.response
619
 
620
+ elapsed_time = time.time() - start_time
 
 
 
 
621
 
622
 
623
+
624
+ answer_dict = self.chat_store.get_messages(conversation_id)[-1].dict()
625
+ answer_dict['additional_kwargs'].update({"time":elapsed_time})
626
+ new_msg = ChatMessage.model_validate(answer_dict)
627
+ self.chat_store.delete_message(conversation_id, -1)
628
+ self.chat_store.add_message(conversation_id, new_msg)
629
+
630
  self.update_convs_metadata(conversation_id, create_flag=create_flag)
631
+
632
  self.save_metadata()
633
+
634
  self.save_chat_history()
635
+
636
 
637
  return response
638
 
 
721
  """Get conversation metadata"""
722
  return self.convs_metadata.get(conv_id, {})
723
 
724
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
725
 
726
  def save_metadata(self):
727
  """Save conversation metadata to file"""
 
742
  except Exception as e:
743
  print(f"Error loading metadata: {e}")
744
 
745
+ def edit_message(self, index, conversation_id):
746
+ if conversation_id is not None:
747
+
748
+ msg_list = self.chat_store.get_messages(conversation_id)
749
+ new_msg_list = msg_list[:index]
750
+
751
+ self.chat_store.set_messages(conversation_id, new_msg_list)
752
+
753
+
754
+ self.update_convs_metadata(conversation_id)
755
+ self.save_metadata()
756
+ self.save_chat_history()
757
+
758
+
759
+ def retry_message(self, conversation_id):
760
+ if conversation_id is not None:
761
+ self.undo_message(conversation_id)
762
+ self.update_convs_metadata(conversation_id)
763
+ self.save_metadata()
764
+ self.save_chat_history()
765
+
766
+
767
+
768
+ def undo_message(self, conversation_id):
769
+ if conversation_id is not None:
770
+ msg_list = self.chat_store.get_messages(conversation_id)
771
+
772
+
773
+ if msg_list[-1].role == MessageRole.ASSISTANT and len(msg_list) > 0:
774
+ self.chat_store.delete_last_message(conversation_id)
775
+ if msg_list[-1].role == MessageRole.USER and len(msg_list) > 0:
776
+ self.chat_store.delete_last_message(conversation_id)
777
+
778
+
779
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
780
 
781
+ self.update_convs_metadata(conversation_id)
782
+ self.save_metadata()
783
+ self.save_chat_history()
784
+
785
+
786
+ def delete_conversation(self, conversation_id):
787
+ if conversation_id is not None:
788
+ self.chat_store.delete_messages(conversation_id)
789
+ self.convs_metadata.pop(conversation_id)
790
+ self.save_metadata()
791
+ self.save_chat_history()
792
+ self.sorted_conversation_list = self.get_sorted_conversation_list()
793
 
794
 
795
 
 
873
  with gr.Column(scale=3):
874
  vector_dropdown = gr.Dropdown(
875
  label="Injury Biomechanics Knowledge Base",
876
+ choices=[(v["display_name"]) for k, v in self.vector_stores.items()],
877
  value=next(iter(self.vector_stores.keys()), None)
878
 
879
  )
 
936
 
937
 
938
 
 
939
  def chat_with_dekcib(history, user_id, conv_idx):
940
 
941
+ start_time = time.time()
942
 
943
  msg = history[-1]["content"]
944
 
945
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
946
  # user_engine.che
947
 
948
+ # conv_id = None
 
 
 
 
 
949
  if conv_idx is not None:
950
  conv_id = user_engine.sorted_conversation_list[conv_idx]
951
+ else:
 
952
  conv_id = None
953
 
954
+ # if len(history) == 1 and conv_idx is None:
955
+ # conv_id = None
956
 
957
 
958
  response = user_engine.chat(msg, conv_id)
959
  answer = response.response
960
 
961
  processed_answer_dict = process_text_with_think_tags(answer)
962
+
963
+
964
  if processed_answer_dict["has_two_parts"]:
965
  think_content = processed_answer_dict["think_part"]
966
  remaining_text = processed_answer_dict["regular_part"]
967
 
968
 
969
+ # thick_msg = gr.ChatMessage(role="assistant", content="", metadata={"title":"Thinking..."})
970
+ history.append({"role": "assistant", "content": "", "metadata":{"title":"Thinking...", "status":"pending"}})
971
+ # history.append(thick_msg)
972
  for character in think_content:
973
+ history[-1]["content"] += character
974
  yield history
975
 
976
+
977
+ elapsed_time = time.time() - start_time
978
+ history[-1]["metadata"]["title"] = f"Thinking... [Thinking time: {elapsed_time:.2f}s]"
979
+ history[-1]["metadata"]["status"] = "done"
980
+ yield history
981
+
982
+ # Start response time measurement
983
+
984
  history.append({"role": "assistant", "content": ""})
985
  for character in remaining_text:
986
  history[-1]["content"] += character
987
  yield history
988
+
989
+ elapsed_time = time.time() - start_time
990
+ history[-1]["content"] += f"\n\n[Total time: {elapsed_time:.2f}s]"
991
+ yield history
992
+
993
+
994
  else:
995
  full_text = processed_answer_dict["full_text"]
996
+ history.append({"role": "assistant", "content": ""})
997
  for character in full_text:
998
  history[-1]["content"] += character
999
  yield history
1000
+
1001
+ elapsed_time = time.time() - start_time
1002
+ history[-1]["content"] += f"\n\n[Total time: {elapsed_time:.2f}s]"
1003
+ yield history
1004
 
1005
 
1006
  def clear_msg():
 
1007
  return ""
1008
 
1009
 
 
1040
 
1041
 
1042
  def click_to_select_conversation(conversation_history, user_id):
1043
+
1044
+
1045
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1046
  user_engine.set_current_conv_id(conversation_history, type="index")
1047
 
 
1048
 
1049
+ chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
1050
+
1051
+ llm_name = user_engine.convs_metadata[user_engine.current_conv_id]["llm_name"]
1052
+ vector_store_name = user_engine.convs_metadata[user_engine.current_conv_id]["vector_store_name"]
1053
+
1054
+ return gr.update(value=conversation_history), chat_history, gr.update(value=llm_name), gr.update(value=vector_store_name)
1055
+
1056
+
1057
 
1058
 
1059
+
1060
+
1061
+
1062
  conversation_history.click(
1063
  click_to_select_conversation,
1064
  [conversation_history, user_id],
1065
+ [conversation_history, chatbot, llm_dropdown, vector_dropdown]
1066
  )
1067
 
1068
 
 
1101
  self.dekcib_chatbot.session_manager.create_session(user_id)
1102
  user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1103
 
1104
+
 
1105
  # chat_store = user_engine.chat_store
1106
 
1107
  # convs = user_engine.convs
1108
  # history = user_engine.history
1109
 
1110
  sorted_conversation_list = user_engine.get_sorted_conversation_list_for_ui()
1111
+
 
1112
  # sorted_conversation_list = [
1113
  # ["I think therefore I am."],
1114
  # ["The unexamined life is not worth living."],
 
1122
  update_conversation_history = gr.update(samples=sorted_conversation_list, value=index)
1123
 
1124
  user_engine.set_current_conv_id(0, type="index")
1125
+ chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
1126
 
1127
+ llm_name = user_engine.convs_metadata[user_engine.current_conv_id]["llm_name"]
1128
+ vector_store_name = user_engine.convs_metadata[user_engine.current_conv_id]["vector_store_name"]
1129
 
1130
  yield llm_name, vector_store_name, update_conversation_history, chat_history
1131
 
 
1149
  user_engine.set_llm(llm_name)
1150
 
1151
 
1152
+
1153
  llm_dropdown.change(
1154
  update_llm,
1155
  [user_id, llm_dropdown],
 
1171
  [user_id, vector_dropdown],
1172
  None
1173
  )
1174
+
1175
+
1176
+ def edit_chat(user_id, history, edit_data: EditData):
1177
+ user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1178
+ idx = edit_data.index
1179
+
1180
+ # Count how many user messages appear up to this index in the UI history
1181
+ user_message_count = 0
1182
+ for i in range(idx + 1):
1183
+ if history[i]["role"] == "user":
1184
+ user_message_count += 1
1185
+
1186
+ # In backend storage, user messages are at positions 0, 2, 4, 6...
1187
+ # So the backend index is (user_message_count - 1) * 2
1188
+ backend_idx = (user_message_count - 1) * 2
1189
+
1190
+ user_engine.edit_message(backend_idx, user_engine.current_conv_id)
1191
+ history = history[: idx+1]
1192
+ return history
1193
 
1194
+
1195
+ chatbot.edit(
1196
+ edit_chat,
1197
+ [user_id, chatbot],
1198
+ [chatbot]
1199
+ ).success(
1200
+ chat_with_dekcib,
1201
+ [chatbot, user_id, conversation_history],
1202
+ [chatbot]
1203
+ ).success(
1204
+ update_conversation_history,
1205
+ [user_id],
1206
+ [conversation_history]
1207
+ )
1208
+
1209
+
1210
+
1211
+ def retry_chat(user_id, history):
1212
+ user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1213
+ user_engine.retry_message(user_engine.current_conv_id)
1214
+
1215
+
1216
+ while history[-1]["role"] == "assistant":
1217
+ history.pop()
1218
+ yield history
1219
+
1220
+
1221
+ return history
1222
+
1223
+
1224
+
1225
+
1226
+
1227
+ chatbot.retry(
1228
+ retry_chat,
1229
+ [user_id, chatbot],
1230
+ [chatbot]
1231
+ ).then(
1232
+ chat_with_dekcib,
1233
+ [chatbot, user_id, conversation_history],
1234
+ [chatbot]
1235
+ ).then(
1236
+ update_conversation_history,
1237
+ [user_id],
1238
+ [conversation_history]
1239
+ )
1240
+
1241
+
1242
+ def undo_chat(user_id):
1243
+ user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1244
+ user_engine.undo_message(user_engine.current_conv_id)
1245
+
1246
+ chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
1247
+
1248
+ return chat_history
1249
+
1250
+ chatbot.undo(
1251
+ undo_chat,
1252
+ [user_id],
1253
+ [chatbot]
1254
+ )
1255
+
1256
+
1257
+
1258
+ def clear_conversation(user_id):
1259
+ user_engine = self.dekcib_chatbot.session_manager.sessions[user_id]
1260
+ user_engine.delete_conversation(user_engine.current_conv_id)
1261
+
1262
+ sorted_conversation_list = user_engine.get_sorted_conversation_list_for_ui()
1263
+
1264
+ if len(sorted_conversation_list) > 0:
1265
+ index = 0
1266
+ else:
1267
+ index = None
1268
+ update_conversation_history = gr.update(samples=sorted_conversation_list, value=index)
1269
+
1270
+
1271
+
1272
+ user_engine.set_current_conv_id(index, type="index")
1273
+ chat_history = user_engine.get_chat_history_for_ui(user_engine.current_conv_id)
1274
+
1275
+ yield update_conversation_history, chat_history
1276
+
1277
+
1278
+
1279
+ chatbot.clear(
1280
+ clear_conversation,
1281
+ [user_id],
1282
+ [conversation_history, chatbot]
1283
+ )
1284
+
1285
+
1286
+ # def clear_chatbot(user_id):
1287
+ # print("user_id")
1288
+ # return []
1289
 
1290
+ # user_id_dropdown.change(
1291
+ # clear_chatbot,
1292
+ # [user_id_dropdown],
1293
+ # [chatbot]
1294
+ # )
1295
+
1296
+ # # Load selected conversation from Dataset click
1297
+ # # def load_clicked_conversation(evt: gr.SelectData, user_id, session_state):
1298
+ # # if not user_id or not evt.index:
1299
+ # # return [], session_state
1300
+
1301
+ # # # Get all conversations for this user
1302
+ # # conversations = get_user_conversations(user_id)
1303
+
1304
+ # # # Check if we have enough conversations
1305
+ # # if not conversations or evt.index >= len(conversations):
1306
+ # # return [], session_state
1307
+
1308
+ # # # Get the clicked conversation ID
1309
+ # # selected_conv_id = conversations[evt.index][0] # Get the conversation ID from the tuple
1310
+
1311
+ # # # Load the conversation
1312
+ # # chat_store, chat_history = load_conversation(user_id, selected_conv_id)
1313
+
1314
+ # # if chat_store:
1315
+ # # # Update session state with loaded conversation
1316
+ # # if not session_state:
1317
+ # # session_state = {}
1318
+
1319
+ # # session_state["user_id"] = user_id
1320
+ # # session_state["session_id"] = user_id
1321
+ # # session_state["conversation_id"] = selected_conv_id
1322
+ # # session_state["chat_store"] = chat_store
1323
+ # # session_state["total_score"] = 0.0
1324
+ # # session_state["answer_count"] = 0
1325
+
1326
+ # # return chat_history, session_state
1327
+ # # else:
1328
+ # # return [], session_state
1329
+
1330
+ # # Setup conversation history click handler
1331
+ # conversation_history.select(
1332
+ # load_clicked_conversation,
1333
+ # [user_id_dropdown, session_state],
1334
+ # [chatbot, session_state]
1335
+ # )
1336
 
1337
  # Create new conversation button should only clear the chat area, but not create a new conversation yet
1338
  def prepare_new_chat():
 
1341
  return [], gr.update(value=None)
1342
 
1343
  def print_dataset(value):
 
1344
  print(value)
1345
 
1346
  # Create new conversation