Rulga committed
Commit 2ca51bf · 1 Parent(s): 20f2637

Enhance error handling and message formatting in respond and load_vector_store functions

Files changed (1): app.py +98 -24
app.py CHANGED
@@ -27,7 +27,13 @@ def get_context(message, conversation_id):
     """Get context from knowledge base"""
     vector_store = load_vector_store()
     if vector_store is None:
-        return "Knowledge base not found. Please create it first."
+        print("Knowledge base not found or failed to load")
+        return ""
+
+    # Check if vector_store is a string (error message) instead of an actual store
+    if isinstance(vector_store, str):
+        print(f"Error with vector store: {vector_store}")
+        return ""
 
     try:
         # Extract context
@@ -42,6 +48,27 @@ def get_context(message, conversation_id):
         print(f"Error getting context: {str(e)}")
         return ""
 
+def load_vector_store():
+    """Load knowledge base from dataset"""
+    try:
+        from src.knowledge_base.dataset import DatasetManager
+        dataset = DatasetManager()
+        success, result = dataset.download_vector_store()
+
+        if success and result is not None and not isinstance(result, str):
+            return result
+
+        # If result is a string, it's an error message
+        if isinstance(result, str):
+            print(f"Error loading knowledge base: {result}")
+        else:
+            print("Knowledge base is None or invalid format")
+
+        return None
+    except Exception as e:
+        print(f"Exception loading knowledge base: {str(e)}")
+        return None
+
 def respond(
     message,
     history,
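The rewritten loader treats DatasetManager.download_vector_store() as returning a (success, result) tuple in which result may be a usable store object, an error string, or None. That contract is not visible in this diff, so the stand-in below is an assumption, sketched only to show the three paths the loader now distinguishes:

# Hypothetical stand-in for DatasetManager, assumed to return a
# (success, result) tuple; the real class lives in src.knowledge_base.dataset
# and its behavior is not shown in this diff.
class FakeDatasetManager:
    def __init__(self, outcome):
        self.outcome = outcome

    def download_vector_store(self):
        if self.outcome == "ok":
            return True, object()  # stands in for a real vector store
        if self.outcome == "error":
            return False, "download failed: dataset not found"  # error string
        return False, None  # nothing usable

for outcome in ("ok", "error", "missing"):
    success, result = FakeDatasetManager(outcome).download_vector_store()
    if success and result is not None and not isinstance(result, str):
        print(f"{outcome}: usable store")
    elif isinstance(result, str):
        print(f"{outcome}: error message -> {result}")
    else:
        print(f"{outcome}: no store available")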
@@ -65,9 +92,27 @@ def respond(
         messages[0]["content"] += f"\n\nContext for response:\n{context}"
 
     # Convert history to OpenAI format for API call
-    for user_msg, assistant_msg in history:
-        messages.append({"role": "user", "content": user_msg["content"]})
-        messages.append({"role": "assistant", "content": assistant_msg["content"]})
+    # Handle different possible history formats more carefully
+    if history:
+        try:
+            for item in history:
+                # Check if we have a pair of messages as expected
+                if isinstance(item, list) and len(item) == 2:
+                    user_msg, assistant_msg = item
+
+                    # Handle different formats of user_msg and assistant_msg
+                    if isinstance(user_msg, dict) and "content" in user_msg:
+                        messages.append({"role": "user", "content": user_msg["content"]})
+                    elif isinstance(user_msg, str):
+                        messages.append({"role": "user", "content": user_msg})
+
+                    if isinstance(assistant_msg, dict) and "content" in assistant_msg:
+                        messages.append({"role": "assistant", "content": assistant_msg["content"]})
+                    elif isinstance(assistant_msg, str):
+                        messages.append({"role": "assistant", "content": assistant_msg})
+        except Exception as e:
+            print(f"Error processing history: {str(e)}")
+            # Continue with empty history if there was an error
 
     # Add current user message
     messages.append({"role": "user", "content": message})
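The replacement loop accepts history entries as [user, assistant] pairs whose elements may be dicts with a "content" key or plain strings. The same normalization as a standalone sketch, exercised with both shapes:

def normalize_history(history):
    """Flatten [user, assistant] pairs into OpenAI-style messages.
    Mirrors the logic in respond(); elements may be dicts or strings."""
    messages = []
    for item in history or []:
        if not (isinstance(item, list) and len(item) == 2):
            continue  # anything that is not a pair is skipped
        for role, msg in zip(("user", "assistant"), item):
            if isinstance(msg, dict) and "content" in msg:
                messages.append({"role": role, "content": msg["content"]})
            elif isinstance(msg, str):
                messages.append({"role": role, "content": msg})
    return messages

print(normalize_history([
    [{"role": "user", "content": "hi"}, {"role": "assistant", "content": "hello"}],
    ["plain question", "plain answer"],  # string pairs are also accepted
]))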
@@ -92,8 +137,23 @@ def respond(
                 token = chunk.choices[0].delta.content
                 if token:
                     response += token
-                    # Return in the format expected by Gradio's message chatbot
-                    yield history + [[{"role": "user", "content": message}, {"role": "assistant", "content": response}]], conversation_id
+                    # Format response in the way Gradio chatbot expects for type="messages"
+                    # For messages format, each message must be a dict with 'role' and 'content'
+                    new_history = []
+
+                    # Copy existing history in the correct format
+                    if history:
+                        for item in history:
+                            if isinstance(item, list) and len(item) == 2:
+                                new_history.append(item)
+
+                    # Add the new message pair
+                    new_history.append([
+                        {"role": "user", "content": message},
+                        {"role": "assistant", "content": response}
+                    ])
+
+                    yield new_history, conversation_id
 
         # Save history if response is complete
         if is_complete or response:  # add response check as fallback
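One caveat: Gradio's Chatbot with type="messages" renders a flat list of {"role": ..., "content": ...} dicts, whereas the history yielded above is a list of two-element pairs. If the component in this app is actually configured with type="messages", a flattening step along these lines would be needed; the adapter is a hypothetical sketch, not part of the commit:

def pairs_to_messages(pair_history):
    """Hypothetical adapter: flatten [[user_dict, assistant_dict], ...]
    into the flat message list a type="messages" Chatbot expects."""
    flat = []
    for user_msg, assistant_msg in pair_history:
        flat.append(user_msg)       # already {"role": "user", ...}
        flat.append(assistant_msg)  # already {"role": "assistant", ...}
    return flat

pairs = [[{"role": "user", "content": "hi"},
          {"role": "assistant", "content": "hello"}]]
print(pairs_to_messages(pairs))
# [{'role': 'user', 'content': 'hi'}, {'role': 'assistant', 'content': 'hello'}]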
@@ -114,7 +174,22 @@ def respond(
 
     except Exception as e:
         print(f"Error generating response: {str(e)}")
-        yield history + [[{"role": "user", "content": message}, {"role": "assistant", "content": "An error occurred while generating the response."}]], conversation_id
+        # Format error response in the way Gradio chatbot expects
+        new_history = []
+
+        # Copy existing history
+        if history:
+            for item in history:
+                if isinstance(item, list) and len(item) == 2:
+                    new_history.append(item)
+
+        # Add error message
+        new_history.append([
+            {"role": "user", "content": message},
+            {"role": "assistant", "content": f"An error occurred while generating the response: {str(e)}"}
+        ])
+
+        yield new_history, conversation_id
 
 def build_kb():
     """Function to create knowledge base"""
@@ -124,20 +199,6 @@ def build_kb():
     except Exception as e:
         return f"Error creating knowledge base: {str(e)}"
 
-def load_vector_store():
-    """Load knowledge base from dataset"""
-    try:
-        from src.knowledge_base.dataset import DatasetManager
-        dataset = DatasetManager()
-        success, store = dataset.download_vector_store()
-        if success:
-            return store
-        print(f"Error loading knowledge base: {store}")
-        return None
-    except Exception as e:
-        print(f"Error loading knowledge base: {str(e)}")
-        return None
-
 def respond_and_clear(message, history, conversation_id):
     """Handle chat message and clear input"""
     # Get model parameters from config
@@ -158,9 +219,22 @@ def respond_and_clear(message, history, conversation_id):
 
     # Return first yielded response directly
     # The respond function now yields data in the expected format
-    new_history, conv_id = next(response_generator)
-
-    return new_history, conv_id, ""  # Clear message input
+    try:
+        new_history, conv_id = next(response_generator)
+        return new_history, conv_id, ""  # Clear message input
+    except Exception as e:
+        print(f"Error in respond_and_clear: {str(e)}")
+        # Return a minimal valid format if there's an error
+        if history:
+            return history + [[
+                {"role": "user", "content": message},
+                {"role": "assistant", "content": f"An error occurred: {str(e)}"}
+            ]], conversation_id, ""
+        else:
+            return [[
+                {"role": "user", "content": message},
+                {"role": "assistant", "content": f"An error occurred: {str(e)}"}
+            ]], conversation_id, ""
 
 # Create interface
 with gr.Blocks() as demo:
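Note that respond_and_clear still consumes only the first item from the generator, so the UI receives the first streamed chunk rather than the finished response. If full streaming through this path were wanted, draining the generator would look roughly like the hypothetical variant below:

def drain_response(response_generator, history, conversation_id):
    """Hypothetical variant: keep only the final yield so the complete
    streamed response is returned; fall back to the inputs on error."""
    new_history, conv_id = history, conversation_id
    try:
        for new_history, conv_id in response_generator:
            pass  # each yield carries the latest partial response
    except Exception as e:
        print(f"Error draining response: {str(e)}")
    return new_history, conv_id, ""  # clear the message box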