|
|
import gradio as gr |
|
|
from huggingface_hub import InferenceClient |
|
|
|
|
|
|
|
|
# Hugging Face model id used to build the default inference client.
MODEL_NAME = "HuggingFaceH4/zephyr-7b-beta"

# NOTE(review): `client` is re-assigned later in this file to a phi-4
# client, so this zephyr instance is effectively dead — confirm which
# model is actually intended and keep only one assignment.
client = InferenceClient(MODEL_NAME)
|
|
|
|
|
|
|
|
# Static in-memory recipe catalogue: (name, ingredients, allergens, budget).
# NOTE(review): this table is not referenced anywhere in the visible code.
_RECIPE_ROWS = [
    ("Veggie Pasta", ["pasta", "tomato", "garlic", "olive oil"], ["gluten"], "low"),
    ("Chicken Stir Fry", ["chicken", "soy sauce", "broccoli", "garlic"], ["soy"], "medium"),
]

recipes = [
    {"name": name, "ingredients": ingredients, "allergies": allergies, "budget": budget}
    for name, ingredients, allergies, budget in _RECIPE_ROWS
]
|
|
|
|
|
client = InferenceClient("microsoft/phi-4") |
|
|
|
|
|
def respond(message, history):
    """Generate a chat reply for *message*, given the prior conversation.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list | None
        Prior turns. Gradio delivers these either as OpenAI-style
        ``{"role": ..., "content": ...}`` dicts (``type="messages"``) or as
        legacy ``(user, assistant)`` pairs; both formats are accepted here.

    Returns
    -------
    str
        The assistant's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system", "content": "You are a friendly chatbot"}]
    for turn in history or []:
        if isinstance(turn, dict):
            # Already an OpenAI-style message dict — pass through unchanged.
            messages.append(turn)
        else:
            # Legacy tuple format: (user_text, assistant_text); either side
            # may be empty/None on a partial turn.
            user_text, assistant_text = turn
            if user_text:
                messages.append({"role": "user", "content": user_text})
            if assistant_text:
                messages.append({"role": "assistant", "content": assistant_text})
    messages.append({"role": "user", "content": message})

    response = client.chat_completion(
        messages,
        max_tokens=100,
    )
    # chat_completion returns a ChatCompletionOutput; attribute access is the
    # documented way to reach the generated text.
    return response.choices[0].message.content.strip()
|
|
|
|
|
|
|
|
# Build the Gradio UI: a header, a few (currently unwired) inputs, and the
# chat interface that drives `respond`.
with gr.Blocks() as demo:
    gr.Markdown("## 🍳 Recipe Suggestion Chatbot")

    # NOTE(review): the slider and the two textboxes below are rendered but
    # never connected to the chat callback — confirm whether they should
    # feed into `respond` (e.g. to filter `recipes`) or be removed.
    slider = gr.Slider(
        minimum=0,
        maximum=100,
        value=50,
        step=1,
        label="Select a number",
    )

    have_items = gr.Textbox(
        label="Ingredients you have (comma separated)",
        placeholder="pasta,tomato,garlic",
    )
    allergies = gr.Textbox(
        label="Allergies (comma separated)",
        placeholder="gluten,soy",
    )

    # Pass `respond` directly — the lambda wrapper added nothing — and use
    # OpenAI-style message dicts for the history, matching what `respond`
    # appends to its message list.
    chatbot_ui = gr.ChatInterface(fn=respond, type="messages")

# Launch after the Blocks context closes so the layout is fully defined.
demo.launch()
|
|
|