import gradio as gr
from huggingface_hub import InferenceClient

# Custom Gradio theme for the app's look and feel.
theme = gr.themes.Ocean(
    primary_hue="red",
    secondary_hue="emerald",
    neutral_hue="amber",
)

# Client for the hosted chat model used to generate recipes.
client = InferenceClient("microsoft/phi-4")


def respond(message, history, budget, have_items, allergies):
    """Generate a recipe-focused chat reply for the user's latest message.

    Parameters
    ----------
    message : str
        The new user message.
    history : list
        Prior conversation turns as [user, assistant] pairs (Gradio's legacy
        "tuples" history format).
        NOTE(review): newer Gradio versions default to messages-style dict
        history — confirm the installed version still passes pairs here.
    budget : float
        Shopping budget in GBP for any missing ingredients.
    have_items : str
        Comma-separated ingredients the user already has.
    allergies : str
        Ingredients/allergens that must never appear in a recipe.

    Returns
    -------
    str
        The assistant's reply text, stripped of surrounding whitespace.
    """
    # System prompt: persona, the user's constraints, and the required
    # response layout for every recipe suggestion.
    # (Fixed: the original concatenation was missing the space before
    # "at home", producing e.g. "has pastaat home.")
    instruction = (
        "You are a friendly chatbot that is designed to help users find recipes "
        "they can cook with ingredients they already have at home. "
        f"The current user has {have_items} at home. "
        f"The current user is allergic to: {allergies}. These ingredients must "
        "under no circumstance feature in the recipes you provide. "
        "For purchasing any additional items, the current user has a budget "
        f"of: {budget}£. "
        "After the user says what they want to have, you must respond with a "
        "recipe you find online, please lay out your response in the following way: "
        """
NAME OF THE RECIPE AS A TITLE WITH A BORDER USING = SIGNS
the ingredients required for the recipe listed (in brackets the cost of any ingredients the user doesn't already have)
a short (max 350 words) summary of the method for making the meal detailing any special appliances required
a link to the source from which you found the recipe
a message asking the user if they want to make this recipe, want more info on where to buy ingredients, or if they want something else.
(depending on the user's response you must respond to their request, if it is for another recipe, follow the exact same format) """
    )

    messages = [{"role": "system", "content": instruction}]

    # Convert Gradio history ([user, assistant] pairs) to OpenAI-style
    # message dicts, skipping any empty turns.
    for user_msg, bot_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if bot_msg:
            messages.append({"role": "assistant", "content": bot_msg})

    # Append the new user message.
    messages.append({"role": "user", "content": message})

    # Call the model. chat_completion returns a ChatCompletionOutput
    # dataclass; attribute access is the documented way to read it.
    response = client.chat_completion(
        messages=messages,
        max_tokens=500,
    )
    # Guard against a None content field before stripping.
    content = response.choices[0].message.content
    return (content or "").strip()


with gr.Blocks(theme=theme) as demo:
    gr.Markdown("## RecipEASY 🍳")

    # Shopping budget in whole pounds.
    budget = gr.Slider(
        minimum=0,    # lowest value
        maximum=100,  # highest value
        step=1,       # increment step
        label="Enter Budget:",
    )
    have_items = gr.Textbox(
        label="Ingredients you have",
        placeholder="e.g. pasta, tomato, garlic",
    )
    allergies = gr.Textbox(
        label="Dietary restrictions",
        placeholder="e.g. gluten, soy",
    )

    # Chat UI; the extra inputs are forwarded to respond() on each turn.
    gr.ChatInterface(
        fn=respond,
        additional_inputs=[budget, have_items, allergies],
    )

demo.launch()