# Recipe-suggestion chatbot (Gradio UI + Hugging Face Inference API).
import gradio as gr
from huggingface_hub import InferenceClient

# ✅ This model supports chat completions through the free Inference API
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"

# Shared client used for all chat-completion calls in this script.
client = InferenceClient(model=MODEL_NAME)
# Example fake recipe database
# Example fake recipe database.
# NOTE(review): no code visible in this file reads `recipes` — presumably
# intended for future filtering by the ingredient/allergy inputs below;
# confirm before removing.
recipes = [
    {
        "name": "Veggie Pasta",
        "ingredients": ["pasta", "tomato", "garlic", "olive oil"],
        "allergies": ["gluten"],   # allergens this recipe conflicts with
        "budget": "low",
    },
    {
        "name": "Chicken Stir Fry",
        "ingredients": ["chicken", "soy sauce", "broccoli", "garlic"],
        "allergies": ["soy"],
        "budget": "medium",
    },
]
# NOTE(review): a second `client = InferenceClient("microsoft/phi-4")` stood
# here, silently overriding the zephyr client configured at the top of the
# file (the model the header comment says supports free chat completions).
# Removed so the script talks to exactly one, deliberately chosen model.


def _as_chat_messages(history):
    """Normalize Gradio chat history into OpenAI-style message dicts.

    Gradio's ChatInterface passes history either as ``[user, assistant]``
    pairs (its default "tuples" format) or as ``{"role", "content"}`` dicts
    (``type="messages"``). Accept both so the callback works either way.
    """
    messages = []
    for entry in history or []:
        if isinstance(entry, dict):
            messages.append({"role": entry["role"], "content": entry["content"]})
        else:
            user_text, bot_text = entry
            if user_text:
                messages.append({"role": "user", "content": user_text})
            if bot_text:
                messages.append({"role": "assistant", "content": bot_text})
    return messages


def respond(message, history):
    """Send the conversation to the hosted model and return its reply.

    Parameters
    ----------
    message : str
        The user's latest message.
    history : list
        Prior turns, as [user, assistant] pairs or role/content dicts.

    Returns
    -------
    str
        The assistant's reply with surrounding whitespace stripped.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot"}]
    messages.extend(_as_chat_messages(history))
    messages.append({"role": "user", "content": message})
    response = client.chat_completion(
        messages,
        max_tokens=100,
    )
    # chat_completion returns a ChatCompletionOutput; attribute access is the
    # documented form (the original's dict-style access also works, but this
    # is canonical).
    return response.choices[0].message.content.strip()
# Assemble the UI. NOTE(review): the slider and the two textboxes render but
# are not wired into the chat callback — their values are currently unused.
# TODO: feed them into respond() (e.g. via ChatInterface additional_inputs)
# or drop them.
with gr.Blocks() as demo:
    gr.Markdown("## 🍳 Recipe Suggestion Chatbot")
    slider = gr.Slider(
        minimum=0,      # lowest value
        maximum=100,    # highest value
        value=50,       # default starting value
        step=1,         # increment step
        label="Select a number",
    )
    have_items = gr.Textbox(
        label="Ingredients you have (comma separated)",
        placeholder="pasta,tomato,garlic",
    )
    allergies = gr.Textbox(
        label="Allergies (comma separated)",
        placeholder="gluten,soy",
    )
    # type="messages" makes Gradio deliver history as role/content dicts,
    # which is the format respond() builds its message list from. The
    # original lambda wrapper added nothing — pass the callback directly.
    chatbot_ui = gr.ChatInterface(fn=respond, type="messages")

demo.launch()