antoinette-f committed on
Commit
4822912
·
verified ·
1 Parent(s): 0eee7fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +5 -8
app.py CHANGED
@@ -52,22 +52,20 @@ def get_suggestions(query):
52
  final_selection = pd.concat(outfit)
53
  return final_selection
54
 
 
55
  # Chatbot + image output function
56
  def respond(message, chat_history):
57
  """
58
  Handles the user message, generates a response, and suggests clothing.
59
- This is where the TypeError was occurring. The key fix is ensuring a tuple is always returned.
60
  """
61
  # Build conversation messages for AI model
62
  messages = [{"role": "system", "content": "You are a clothing assistant. Suggest suitable clothing items from the database based on the user's query."}]
63
 
64
- # Add user message to history
65
- history.append({"role": "user", "content": message})
66
 
67
  # Query Hugging Face Inference API
68
- # The `stream` parameter is not supported by Gradio's Chatbot.
69
- # We will use the non-streaming `chat_completion` method.
70
- ai_response = client.chat_completion(messages=history, max_tokens=1000)
71
 
72
  # Extract the AI's reasoning or a fallback
73
  reasoning = ""
@@ -83,8 +81,7 @@ def respond(message, chat_history):
83
  # Add chatbot history
84
  chat_history.append((message, reasoning))
85
 
86
- # This is the corrected return statement.
87
- # It MUST return a tuple with two elements: the updated chat_history and the image_paths.
88
  return chat_history, image_paths
89
 
90
  # Gradio UI ---
 
52
  final_selection = pd.concat(outfit)
53
  return final_selection
54
 
55
+ # Chatbot + image output function
56
  # Chatbot + image output function
57
  def respond(message, chat_history):
58
  """
59
  Handles the user message, generates a response, and suggests clothing.
 
60
  """
61
  # Build conversation messages for AI model
62
  messages = [{"role": "system", "content": "You are a clothing assistant. Suggest suitable clothing items from the database based on the user's query."}]
63
 
64
+ # Add user message to history. The variable name is now correct.
65
+ chat_history.append({"role": "user", "content": message})
66
 
67
  # Query Hugging Face Inference API
68
+ ai_response = client.chat_completion(messages=chat_history, max_tokens=1000)
 
 
69
 
70
  # Extract the AI's reasoning or a fallback
71
  reasoning = ""
 
81
  # Add chatbot history
82
  chat_history.append((message, reasoning))
83
 
84
+ # Return the updated chat history and the image paths
 
85
  return chat_history, image_paths
86
 
87
  # Gradio UI ---