Ifeeab05 committed on
Commit
8965f8c
·
verified ·
1 Parent(s): 063f132

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +47 -51
app.py CHANGED
@@ -2,75 +2,71 @@ import gradio as gr
2
  import pandas as pd
3
  from huggingface_hub import InferenceClient
4
 
5
- # Load AI model for reasoning
6
- # You can swap to another LLM available on Hugging Face
7
  client = InferenceClient("microsoft/phi-4")
8
 
9
- # Load clothing database CSV
10
- # CSV must have: name, image_path, category, formality, weather, notes
11
  clothing_df = pd.read_csv("clothes.csv")
12
 
13
- #Simple filtering function
14
  def get_suggestions(query):
15
  results = clothing_df
16
-
17
- # Weather-based filtering
18
- if "rain" in query.lower():
19
- results = results[results['weather'].str.contains("rain", case=False)]
20
- elif "cold" in query.lower():
21
- results = results[results['weather'].str.contains("cold", case=False)]
22
- elif "hot" in query.lower():
23
- results = results[results['weather'].str.contains("hot", case=False)]
24
- elif "snow" in query.lower():
25
- results = results[results['weather'].str.contains("snow", case=False)]
26
-
27
- # Formality-based filtering
28
- elif "formal" in query.lower() or "office" in query.lower():
29
- results = results[results['formality'].str.contains("formal", case=False)]
30
- elif "casual" in query.lower():
31
- results = results[results['formality'].str.contains("casual", case=False)]
32
-
33
- # If no keywords match, return random sample
 
 
 
34
  else:
35
- results = results.sample(min(4, len(results)))
36
-
37
-
38
 
39
  return results
40
 
41
- # Chatbot + image output
42
  def respond(message, history):
43
- # Send to LLM for reasoning
44
  messages = [{"role": "system", "content": "You are a clothing assistant. Suggest suitable clothing items from the database based on the user's needs."}]
45
- if history:
46
- for user_msg, bot_msg in history:
47
- messages.append({"role": "user", "content": user_msg})
48
- messages.append({"role": "assistant", "content": bot_msg})
49
  messages.append({"role": "user", "content": message})
50
-
51
- ai_response = client.chat_completion(messages, max_tokens=150)
52
- reasoning = ai_response['choices'][0]['message']['content'].strip()
53
 
54
- # Get filtered clothing
55
- matches = get_suggestions(message)
 
56
 
57
- # Create image list for gallery
 
58
  image_paths = matches['image_path'].tolist()
59
 
60
- return reasoning, image_paths
 
 
61
 
62
- # --- Gradio UI ---
63
  with gr.Blocks() as demo:
64
- chatbot = gr.Chatbot(label="Chatbot Conversation", type="messages")
65
- gallery = gr.Gallery(label="Recommended Clothing", height="auto")
66
- btn = gr.Button("Show Images")
 
 
 
67
 
68
- def show_images():
69
- return[
70
- ("https://picsum.photos/200", "Image 1"),
71
- ("https://picsum.photos/201", "Image 2"),
72
- ("https://picsum.photos/202", "Image 3")
73
- ]
74
- btn.click(fn=show_images, outputs=gallery)
75
 
76
- demo.launch()
 
2
  import pandas as pd
3
  from huggingface_hub import InferenceClient
4
 
5
+ # Load AI model for reasoning
 
6
  client = InferenceClient("microsoft/phi-4")
7
 
8
+ # Load clothing database CSV
 
9
  clothing_df = pd.read_csv("clothes.csv")
10
 
11
# --- Simple filtering function ---
def get_suggestions(query):
    """Pick clothing rows from ``clothing_df`` matching keywords in *query*.

    Weather keywords (rain/cold/hot/snow) filter the ``weather`` column;
    formality keywords (formal/office/casual) filter the ``formality``
    column. Both filters can apply to the same query (e.g. "formal outfit
    for a rainy day"), unlike a single if/elif chain that would stop at
    the first match. If the filters match nothing — or no keyword is
    present — a random sample of the whole database is returned.

    Returns:
        A DataFrame of at most 3 randomly sampled matching rows.
    """
    q = query.lower()
    results = clothing_df

    # Weather-based filtering: the first keyword found in the query wins.
    for keyword in ("rain", "cold", "hot", "snow"):
        if keyword in q:
            results = results[results['weather'].str.contains(keyword, case=False, na=False)]
            break

    # Formality-based filtering ("office" implies formal wear). Applied on
    # top of any weather filter so combined queries narrow correctly.
    if "formal" in q or "office" in q:
        results = results[results['formality'].str.contains("formal", case=False, na=False)]
    elif "casual" in q:
        results = results[results['formality'].str.contains("casual", case=False, na=False)]

    # Fall back to the full database when the filters matched nothing,
    # then cap the answer at 3 randomly chosen items.
    if results.empty:
        results = clothing_df
    return results.sample(min(3, len(results)))
39
 
40
# --- Chatbot + image output ---
def respond(message, history):
    """Answer one chat turn and select matching clothing images.

    Parameters:
        message: the user's latest text input.
        history: list of (user, assistant) tuples from the gr.Chatbot
            state; may be None or empty on the first turn.

    Returns:
        (updated_history, image_paths) — the new chat history for the
        Chatbot component and a list of image paths for the Gallery.
    """
    # Tolerate None on the first turn and avoid mutating Gradio's state
    # list in place.
    history = list(history or [])

    # Build the conversation for the model: system prompt, prior turns,
    # then the new user message.
    messages = [{"role": "system", "content": "You are a clothing assistant. Suggest suitable clothing items from the database based on the user's needs."}]
    for user_msg, bot_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": bot_msg})
    messages.append({"role": "user", "content": message})

    # Query the Hugging Face Inference API. The client is already bound to
    # "microsoft/phi-4", so no model argument is needed. Never let a
    # network/model error crash the UI callback — fall back to an apology.
    try:
        ai_response = client.chat_completion(messages=messages, max_tokens=150)
        reasoning = (ai_response.choices[0].message.content or "").strip()
    except Exception as err:  # UI boundary: report the failure instead of raising
        reasoning = f"Sorry, I couldn't reach the model right now ({err})."

    # Get filtered clothing images for the gallery.
    matches = get_suggestions(message)
    image_paths = matches['image_path'].tolist()

    # Append this turn to the chatbot history.
    history.append((message, reasoning))
    return history, image_paths
60
 
61
# --- Gradio UI ---
# Chat interface plus an image gallery; respond() drives both outputs.
with gr.Blocks() as demo:
    gr.Markdown("## Fashioneer - your fashion pioneer!")
    gr.Markdown("Ask me what to wear and I'll suggest clothing with images from the database.")

    chatbot = gr.Chatbot(label="Chatbot Conversation")
    gallery = gr.Gallery(label="Recommended Clothing", columns=3, height="auto")
    user_input = gr.Textbox(label="Type your outfit requirements here", placeholder="e.g., What should I wear on a rainy day?")

    # Route the message to respond(), then clear the textbox so the user
    # can type the next question without deleting the old one.
    user_input.submit(respond, [user_input, chatbot], [chatbot, gallery])
    user_input.submit(lambda: "", None, [user_input])

demo.launch()