"""Recipe-suggestion chatbot built on Gradio + the Hugging Face Inference API."""

import gradio as gr
from huggingface_hub import InferenceClient

# This model supports chat completions through the free Inference API.
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"

# Single shared client.  (A second `InferenceClient("microsoft/phi-4")`
# assignment previously clobbered this one, silently switching models and
# contradicting the comment above — keep exactly one client.)
client = InferenceClient(model=MODEL_NAME)

# Example fake recipe database.
# NOTE(review): currently unused by respond(); kept for future
# ingredient/allergy/budget filtering logic.
recipes = [
    {
        "name": "Veggie Pasta",
        "ingredients": ["pasta", "tomato", "garlic", "olive oil"],
        "allergies": ["gluten"],
        "budget": "low",
    },
    {
        "name": "Chicken Stir Fry",
        "ingredients": ["chicken", "soy sauce", "broccoli", "garlic"],
        "allergies": ["soy"],
        "budget": "medium",
    },
]


def respond(message, history):
    """Send the chat history plus the new user message to the model.

    Args:
        message: The latest user message as a plain string.
        history: Prior turns as OpenAI-style {"role", "content"} dicts.
            Requires ``gr.ChatInterface(..., type="messages")`` below so
            Gradio supplies this format (the default tuple-pair format
            would break ``messages.extend(history)``).

    Returns:
        The assistant's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot"}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})

    response = client.chat_completion(messages, max_tokens=100)
    # chat_completion returns a ChatCompletionOutput dataclass; use the
    # documented attribute access rather than dict subscripting.
    return response.choices[0].message.content.strip()


with gr.Blocks() as demo:
    gr.Markdown("## 🍳 Recipe Suggestion Chatbot")

    # NOTE(review): the three inputs below are displayed but not yet wired
    # into respond() — confirm intended use or connect them to the chat fn.
    slider = gr.Slider(
        minimum=0,      # lowest value
        maximum=100,    # highest value
        value=50,       # default starting value
        step=1,         # increment step
        label="Select a number",
    )
    have_items = gr.Textbox(
        label="Ingredients you have (comma separated)",
        placeholder="pasta,tomato,garlic",
    )
    allergies = gr.Textbox(
        label="Allergies (comma separated)",
        placeholder="gluten,soy",
    )

    # type="messages" makes Gradio pass history as role/content dicts,
    # matching what respond() builds; the lambda wrapper was redundant.
    chatbot_ui = gr.ChatInterface(fn=respond, type="messages")

# Guard the launch so importing this module doesn't start a web server.
if __name__ == "__main__":
    demo.launch()