antoinette-f committed on
Commit
d8e406b
·
verified ·
1 Parent(s): a6cb699

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +68 -2
app.py CHANGED
@@ -7,6 +7,23 @@ from huggingface_hub import InferenceClient
7
  # The client is used for generating the AI's text response.
8
  client = InferenceClient("microsoft/phi-4")
9
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
10
  # Load the clothing database CSV. Make sure 'clothing.csv' is in the same directory.
11
  try:
12
  clothing_df = pd.read_csv("clothing.csv")
@@ -32,8 +49,6 @@ def get_suggestions(query):
32
  results = results[results['weather'].str.contains('hot', case=False, na=False)]
33
  elif 'snow' in q:
34
  results = results[results['weather'].str.contains('snow', case=False, na=False)]
35
- else:
36
- results = results[results['weather'].str.contains('all', case=False, na=False)]
37
 
38
  # Formality-based filtering
39
  if "formal" in q or "office" in q:
@@ -62,6 +77,57 @@ def get_suggestions(query):
62
  final_selection = pd.concat(outfit)
63
  return final_selection
64
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  # Chatbot + image output function
66
  def respond(message, chat_history):
67
  """
 
7
  # The client is used for generating the AI's text response.
8
  client = InferenceClient("microsoft/phi-4")
9
 
10
+ # Load the clothing database CSV. Make sure 'clothing.csv' is in the same directory.
11
+ try:
12
+ clothing_df = pd.read_csv("clothing.csv")
13
+ except FileNotFoundError:
14
+ print("Error: 'clothing.csv' not found. Please ensure the file is in the same directory.")
15
+ clothing_df = pd.DataFrame(columns=['weather', 'formality', 'category', 'image_path'])
16
+
17
+ # Simple filtering function to get clothing suggestions based on a query
18
+ import gradio as gr
19
+ import pandas as pd
20
+ from huggingface_hub import InferenceClient
21
+
22
+ # Load the AI model and clothing database
23
+ # NOTE: Replace 'microsoft/phi-4' with the actual model ID if different
24
+ # The client is used for generating the AI's text response.
25
+ client = InferenceClient("microsoft/phi-4")
26
+
27
  # Load the clothing database CSV. Make sure 'clothing.csv' is in the same directory.
28
  try:
29
  clothing_df = pd.read_csv("clothing.csv")
 
49
  results = results[results['weather'].str.contains('hot', case=False, na=False)]
50
  elif 'snow' in q:
51
  results = results[results['weather'].str.contains('snow', case=False, na=False)]
 
 
52
 
53
  # Formality-based filtering
54
  if "formal" in q or "office" in q:
 
77
  final_selection = pd.concat(outfit)
78
  return final_selection
79
 
80
+ # Chatbot + image output function
81
+ def respond(message, chat_history):
82
+
83
+ # Build conversation messages for the AI model
84
+ messages = [{"role": "system", "content": "You are a clothing assistant. Suggest suitable clothing items from the database based on the user's query."}]
85
+
86
+ # Iterate through the history tuples and add them to the messages list
87
+ for user_msg, bot_msg in chat_history:
88
+ messages.append({"role": "user", "content": user_msg})
89
+ messages.append({"role": "assistant", "content": bot_msg})
90
+
91
+ # Add the current user message to the list for the AI call
92
+ messages.append({"role": "user", "content": message})
93
+
94
+ # Query Hugging Face Inference API
95
+ ai_response = client.chat_completion(messages=messages, max_tokens=1000)
96
+
97
+ # Extract the AI's reasoning or a fallback
98
+ reasoning = "I'm having trouble generating a response. Please try again."
99
+ if ai_response.choices and ai_response.choices[0].message:
100
+ reasoning = ai_response.choices[0].message.content.strip()
101
+
102
+ # Get filtered clothing suggestions
103
+ matches = get_suggestions(message)
104
+ image_paths = matches['image_path'].tolist()
105
+
106
+ # Append the new message and response as a single tuple to the chat history
107
+ chat_history.append((message, reasoning))
108
+
109
+ # Return the updated chat history and the image paths
110
+ return chat_history, image_paths
111
+
112
+ # Gradio UI ---
113
+ with gr.Blocks(theme=gr.themes.Soft()) as demo:
114
+ gr.Markdown("## Fashioneer - your fashion pioneer!")
115
+ gr.Markdown("Ask me what to wear and I'll suggest clothing with images from the database.")
116
+
117
+ # This is the search bar at the top
118
+ user_input = gr.Textbox(label="Type your outfit requirements here", placeholder="e.g., What should I wear on a rainy day?")
119
+
120
+ # This row will place the chatbot and gallery side-by-side
121
+ with gr.Row():
122
+ chatbot = gr.Chatbot(label="Chatbot Conversation")
123
+ gallery = gr.Gallery(label="Recommended Clothing", columns=2, height='auto')
124
+
125
+ # This is the original line that connects the input and output
126
+ user_input.submit(fn=respond, inputs=[user_input, chatbot], outputs=[chatbot, gallery])
127
+
128
+ # This starts the Gradio app
129
+ demo.launch()
130
+
131
  # Chatbot + image output function
132
  def respond(message, chat_history):
133
  """