Rulga committed on
Commit
20f2637
·
1 Parent(s): 66cbdf8

Refactor message history handling in respond function for improved format consistency and error management

Browse files
Files changed (1) hide show
  1. app.py +12 -18
app.py CHANGED
@@ -64,12 +64,10 @@ def respond(
64
  if context:
65
  messages[0]["content"] += f"\n\nContext for response:\n{context}"
66
 
67
- # Convert history to OpenAI format
68
- for msg_pair in history:
69
- messages.extend([
70
- {"role": "user", "content": msg_pair[0]["content"]},
71
- {"role": "assistant", "content": msg_pair[1]["content"]}
72
- ])
73
 
74
  # Add current user message
75
  messages.append({"role": "user", "content": message})
@@ -94,7 +92,8 @@ def respond(
94
  token = chunk.choices[0].delta.content
95
  if token:
96
  response += token
97
- yield [[{"role": "user", "content": message}, {"role": "assistant", "content": response}]], conversation_id
 
98
 
99
  # Save history if response is complete
100
  if is_complete or response: # add response check as fallback
@@ -115,7 +114,7 @@ def respond(
115
 
116
  except Exception as e:
117
  print(f"Error generating response: {str(e)}")
118
- yield [[{"role": "user", "content": message}, {"role": "assistant", "content": "An error occurred while generating the response."}]], conversation_id
119
 
120
  def build_kb():
121
  """Function to create knowledge base"""
@@ -157,16 +156,11 @@ def respond_and_clear(message, history, conversation_id):
157
  top_p=top_p
158
  )
159
 
160
- # Return first yielded response
161
- response_data, conv_id = next(response_generator)
 
162
 
163
- # Convert response to the correct format for Gradio chatbot
164
- formatted_history = history + [[
165
- {"role": "user", "content": message},
166
- {"role": "assistant", "content": response_data[0][1]}
167
- ]]
168
-
169
- return formatted_history, conv_id, "" # Clear message input
170
 
171
  # Create interface
172
  with gr.Blocks() as demo:
@@ -322,4 +316,4 @@ if __name__ == "__main__":
322
  if not load_vector_store():
323
  print("Knowledge base not found. Please create it through the interface.")
324
 
325
- demo.launch()
 
64
  if context:
65
  messages[0]["content"] += f"\n\nContext for response:\n{context}"
66
 
67
+ # Convert history to OpenAI format for API call
68
+ for user_msg, assistant_msg in history:
69
+ messages.append({"role": "user", "content": user_msg["content"]})
70
+ messages.append({"role": "assistant", "content": assistant_msg["content"]})
 
 
71
 
72
  # Add current user message
73
  messages.append({"role": "user", "content": message})
 
92
  token = chunk.choices[0].delta.content
93
  if token:
94
  response += token
95
+ # Return in the format expected by Gradio's message chatbot
96
+ yield history + [[{"role": "user", "content": message}, {"role": "assistant", "content": response}]], conversation_id
97
 
98
  # Save history if response is complete
99
  if is_complete or response: # add response check as fallback
 
114
 
115
  except Exception as e:
116
  print(f"Error generating response: {str(e)}")
117
+ yield history + [[{"role": "user", "content": message}, {"role": "assistant", "content": "An error occurred while generating the response."}]], conversation_id
118
 
119
  def build_kb():
120
  """Function to create knowledge base"""
 
156
  top_p=top_p
157
  )
158
 
159
+ # Return first yielded response directly
160
+ # The respond function now yields data in the expected format
161
+ new_history, conv_id = next(response_generator)
162
 
163
+ return new_history, conv_id, "" # Clear message input
 
 
 
 
 
 
164
 
165
  # Create interface
166
  with gr.Blocks() as demo:
 
316
  if not load_vector_store():
317
  print("Knowledge base not found. Please create it through the interface.")
318
 
319
+ demo.launch()