w3680 committed on
Commit
57cde09
·
1 Parent(s): b88c8d2

changed global to state

Browse files
chat_logic/chat_stream.py CHANGED
@@ -63,7 +63,7 @@ def chatbot_answer_init(user_query, vector_db, history, response_type, prompt, k
63
  returns:
64
  answer (list): The model's response added to the chat history.
65
  """
66
- if vector_db:
67
  context = query_vector_db(user_query, vector_db, k)
68
  else:
69
  context = ""
@@ -83,7 +83,7 @@ def chatbot_rag_init(user_query):
83
  vector_database = create_embedding_vector_db(chunks)
84
  return vector_database
85
 
86
- def chatbot_interface(history, user_query, response_type, conversation_state):
87
  """
88
 
89
  UI uses this function to handle general chat functionality.
@@ -100,7 +100,7 @@ def chatbot_interface(history, user_query, response_type, conversation_state):
100
  """
101
  #Diagnose issue
102
  if conversation_state == 'interactive_diagnosis':
103
- answer = chatbot_answer_init(user_query, None, history, response_type, prompt="diagnose_issue")
104
  extracted_info = information_extractor(answer)
105
 
106
  if any(value == '' or value is None or (value is not None and 'none' in value.lower()) or
@@ -110,7 +110,6 @@ def chatbot_interface(history, user_query, response_type, conversation_state):
110
  ):
111
  conversation_state = "interactive_diagnosis"
112
  else:
113
- global vector_db
114
  vector_db = [] # reset vector database to avoid memory issues
115
  vector_db = chatbot_rag_init(answer[-1][1])
116
 
@@ -135,9 +134,9 @@ def chatbot_interface(history, user_query, response_type, conversation_state):
135
  k=5)
136
  # load guides, create embeddings and return answer for first query
137
  print("Answer before returning to Handle User INput:", answer)
138
- return answer, conversation_state
139
 
140
- def handle_user_input(user_input_text, history, conversation_state, response_type):
141
  print(conversation_state)
142
  print(type(conversation_state))
143
  print("History before calling Chatbot Interface:", history)
@@ -145,10 +144,10 @@ def handle_user_input(user_input_text, history, conversation_state, response_typ
145
  if conversation_state == "awaiting_support_confirmation":
146
  yield from support_ticket_needed(user_input_text, history, conversation_state)
147
  else:
148
- answer, conversation_state = chatbot_interface(history, user_input_text, response_type, conversation_state)
149
  print("Answer before returning to Interface Design:", answer)
150
  print("Conversation state before returning to Interface Design:", conversation_state)
151
- yield answer, "", conversation_state
152
 
153
  # Feedback function for thumbs up (chat ends with success message & restarts)
154
  def feedback_positive(history):
@@ -176,7 +175,7 @@ def support_ticket_needed(message, history, conversation_state):
176
  if conversation_state == "awaiting_support_confirmation":
177
  if "yes" in user_message:
178
  ticket_text = chatbot_answer_init("Please summarize this history into a support ticket.",
179
- vector_db,
180
  history,
181
  response_type="Technical",
182
  prompt="support_ticket",
 
63
  returns:
64
  answer (list): The model's response added to the chat history.
65
  """
66
+ if vector_db != []:
67
  context = query_vector_db(user_query, vector_db, k)
68
  else:
69
  context = ""
 
83
  vector_database = create_embedding_vector_db(chunks)
84
  return vector_database
85
 
86
+ def chatbot_interface(history, user_query, response_type, conversation_state, vector_db):
87
  """
88
 
89
  UI uses this function to handle general chat functionality.
 
100
  """
101
  #Diagnose issue
102
  if conversation_state == 'interactive_diagnosis':
103
+ answer = chatbot_answer_init(user_query, vector_db, history, response_type, prompt="diagnose_issue")
104
  extracted_info = information_extractor(answer)
105
 
106
  if any(value == '' or value is None or (value is not None and 'none' in value.lower()) or
 
110
  ):
111
  conversation_state = "interactive_diagnosis"
112
  else:
 
113
  vector_db = [] # reset vector database to avoid memory issues
114
  vector_db = chatbot_rag_init(answer[-1][1])
115
 
 
134
  k=5)
135
  # load guides, create embeddings and return answer for first query
136
  print("Answer before returning to Handle User INput:", answer)
137
+ return answer, conversation_state, vector_db
138
 
139
+ def handle_user_input(user_input_text, history, conversation_state, response_type, vector_db):
140
  print(conversation_state)
141
  print(type(conversation_state))
142
  print("History before calling Chatbot Interface:", history)
 
144
  if conversation_state == "awaiting_support_confirmation":
145
  yield from support_ticket_needed(user_input_text, history, conversation_state)
146
  else:
147
+ answer, conversation_state, vector_db = chatbot_interface(history, user_input_text, response_type, conversation_state, vector_db)
148
  print("Answer before returning to Interface Design:", answer)
149
  print("Conversation state before returning to Interface Design:", conversation_state)
150
+ yield answer, "", conversation_state, vector_db # return answer to the UI and clear the input box
151
 
152
  # Feedback function for thumbs up (chat ends with success message & restarts)
153
  def feedback_positive(history):
 
175
  if conversation_state == "awaiting_support_confirmation":
176
  if "yes" in user_message:
177
  ticket_text = chatbot_answer_init("Please summarize this history into a support ticket.",
178
+ [],
179
  history,
180
  response_type="Technical",
181
  prompt="support_ticket",
ui/interface_design.py CHANGED
@@ -38,7 +38,7 @@ def interface_init():
38
 
39
  # chat_history = gr.State([]) # For maintaining the chat state
40
  conversation_state = gr.State("interactive_diagnosis") # For awaiting the users response if support ticket is needed
41
-
42
  chatbot = gr.Chatbot(elem_id="chat-container")
43
 
44
  # Input components
@@ -52,14 +52,14 @@ def interface_init():
52
 
53
  submit_btn.click(
54
  fn=handle_user_input,
55
- inputs=[user_input, chatbot, conversation_state, response_type],
56
- outputs=[chatbot, user_input, conversation_state]
57
  )
58
 
59
  user_input.submit(
60
  fn=handle_user_input,
61
- inputs=[user_input, chatbot, conversation_state, response_type],
62
- outputs=[chatbot, user_input, conversation_state]
63
  )
64
 
65
  # Connect thumbs up to success message (stops chat)
 
38
 
39
  # chat_history = gr.State([]) # For maintaining the chat state
40
  conversation_state = gr.State("interactive_diagnosis") # For awaiting the users response if support ticket is needed
41
+ vector_db = gr.State([]) # Per-session RAG vector database, threaded through handle_user_input instead of a global
42
  chatbot = gr.Chatbot(elem_id="chat-container")
43
 
44
  # Input components
 
52
 
53
  submit_btn.click(
54
  fn=handle_user_input,
55
+ inputs=[user_input, chatbot, conversation_state, response_type, vector_db],
56
+ outputs=[chatbot, user_input, conversation_state, vector_db]
57
  )
58
 
59
  user_input.submit(
60
  fn=handle_user_input,
61
+ inputs=[user_input, chatbot, conversation_state, response_type, vector_db],
62
+ outputs=[chatbot, user_input, conversation_state, vector_db]
63
  )
64
 
65
  # Connect thumbs up to success message (stops chat)