"""Gradio chatbot that suggests clothing items from a CSV database via an LLM."""

import gradio as gr
from huggingface_hub import InferenceClient
import pandas as pd

# LLM client used to generate clothing suggestions.
client = InferenceClient("microsoft/phi-4")

# Load the clothing database once at startup.
# BUG FIX: the original loaded this twice, the second time with a misspelled
# path ("clothing_databse.csv") that overwrote the good load and crashed.
clothing_df = pd.read_csv("clothing_database.csv")

SYSTEM_PROMPT = (
    "You are a clothing assistant. Based on the the user's request, suggest "
    "suitable clothing items from the database and return their image paths"
)


def respond(message, history):
    """Generate an assistant reply for the chat UI.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[dict] | None
        Prior turns in OpenAI-style {"role", "content"} format
        (Gradio supplies this when type="messages").

    Returns
    -------
    str
        The model's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system", "content": SYSTEM_PROMPT}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    # Ask the model for a suggestion based on the full conversation.
    response = client.chat_completion(messages, max_tokens=200)
    # chat_completion returns a ChatCompletionOutput; attribute access is the
    # documented way to reach the generated text.
    return response.choices[0].message.content.strip()


def search_clothing(weather=None, formality=None):
    """Filter the clothing database by weather and/or formality.

    Parameters
    ----------
    weather : str | None
        Substring to match (case-insensitively) against the "weather" column.
    formality : str | None
        Substring to match (case-insensitively) against the "formality" column.

    Returns
    -------
    pandas.DataFrame
        Rows matching all provided filters (the whole table if none given).

    NOTE(review): assumes the CSV has "weather" and "formality" columns —
    the original indexed by the argument *value* (results[weather]), which
    only works if a user types an exact column name; verify against the CSV.
    """
    results = clothing_df
    if weather:
        # BUG FIX: .str.contain() does not exist — the method is .str.contains().
        # na=False keeps NaN cells from raising during boolean indexing.
        results = results[results["weather"].str.contains(weather, case=False, na=False)]
    if formality:
        results = results[results["formality"].str.contains(formality, case=False, na=False)]
    return results


# Chatbot UI: maintains conversation history and a user-input box.
chatbot = gr.ChatInterface(respond, type="messages", title="Capstone project")

if __name__ == "__main__":
    # Only launch the web UI when run as a script, not when imported.
    chatbot.launch()