LeannJoy commited on
Commit
21ae22e
·
verified ·
1 Parent(s): ec72a13

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +271 -0
app.py ADDED
@@ -0,0 +1,271 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import uuid

import streamlit as st
from streamlit_chat import message

# HuggingFace integration
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace

# Core LangChain schema
from langchain_core.messages import (
    AIMessage,
    HumanMessage,
    SystemMessage,
    BaseMessage
)
16
+
17
# --- 1. Load Environment Variables ---
# Hugging Face Inference API token, injected as a Space secret.
HF_API_TOKEN = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
# FIX: Hub repo ids are case-sensitive and the official org is lowercase
# "mistralai" — the previous "Mistralai/..." id fails to resolve.
MODEL_REPO_ID = "mistralai/Mistral-7B-Instruct-v0.2"
20
+
21
# --- 2. Global System Prompt (Chef Personality) ---
# Persona text kept as a separate module constant; every new chat is seeded
# with this single SystemMessage.
_SYSTEM_PROMPT_TEXT = (
    "You are Kitchen Buddy 👨‍🍳, a warm and friendly culinary assistant. "
    "Your job is to help people with anything related to food, cooking, or cuisine.\n\n"
    "You can:\n"
    "- Explain what ingredients are, their uses, and their cultural background\n"
    "- Suggest recipes and meal ideas\n"
    "- Offer ingredient substitutions\n"
    "- Teach cooking techniques and science\n"
    "- Provide healthy diet adaptations\n"
    "- Explore global cuisines & traditions\n\n"
    "Keep your tone helpful and approachable. "
    "If a user asks about a food item (e.g., 'what are apples'), explain what it is and how it’s commonly used. "
    "If they ask what to cook with it, suggest a few recipes. "
    "If something is unrelated to food or cooking, politely redirect back to culinary topics."
)

SYSTEM_PROMPT = SystemMessage(content=_SYSTEM_PROMPT_TEXT)
39
+
40
# --- 3. Streamlit UI Setup ---
# Page config must be the first Streamlit call of the script run.
st.set_page_config(page_title="Kitchen Buddy 👨‍🍳", layout="centered")
st.title("👨‍🍳 Kitchen Buddy")
# Intro blurb; the f-string surfaces which model repo is actually in use.
st.markdown(f"""
Your friendly culinary assistant — ask about recipes, ingredients, and cooking techniques.
**🤖 Model in use:** `{MODEL_REPO_ID}`
""")
47
# --- 4. Initialise LLM ---
@st.cache_resource
def initialize_llm():
    """Build the chat model once per process (cached by Streamlit).

    Returns the ChatHuggingFace wrapper, or None when the API token is
    missing or the endpoint cannot be constructed.
    """
    if not HF_API_TOKEN:
        st.error("🚨 Please set the secret **HUGGINGFACEHUB_API_TOKEN** in your HF Space.")
        return None

    # Re-export the token so downstream HF clients pick it up from the env.
    os.environ['HUGGINGFACEHUB_API_TOKEN'] = HF_API_TOKEN
    try:
        endpoint = HuggingFaceEndpoint(
            repo_id=MODEL_REPO_ID,
            task="text-generation",
            max_new_tokens=512,
            temperature=0.7,
            do_sample=True,
            repetition_penalty=1.03
        )
        return ChatHuggingFace(llm=endpoint)
    except Exception as exc:
        # Show a friendly error in the UI; full details go to the Space logs.
        st.error("❌ Failed to initialize LLM. Check API key and model availability.")
        print(f"Detailed LLM init error: {exc}")
        return None

CHAT_MODEL = initialize_llm()
72
+
73
# --- 5. Session State for Multiple Chats ---
def new_chat():
    """Create a fresh conversation (system prompt only) and make it current."""
    chat_id = str(uuid.uuid4())
    st.session_state.chats[chat_id] = [SYSTEM_PROMPT]
    st.session_state.chat_titles[chat_id] = "New Chat"
    st.session_state.current_chat_id = chat_id
79
+
80
# First run: create the chat registries and seed one empty conversation.
if 'chats' not in st.session_state:
    st.session_state.chats = {}
    st.session_state.chat_titles = {}
    new_chat()
# Defensive: normally set by new_chat() above, but guard partial state.
if 'current_chat_id' not in st.session_state:
    new_chat()
# Flag set by the input handler; tells the next rerun to invoke the model.
if 'generate_next' not in st.session_state:
    st.session_state.generate_next = False
88
+
89
+
90
def get_current_messages() -> list[BaseMessage]:
    """Return the message list of the active chat (system-prompt fallback)."""
    active_id = st.session_state.current_chat_id
    return st.session_state.chats.get(active_id, [SYSTEM_PROMPT])
92
+
93
def set_current_chat(chat_id: str) -> None:
    # Point the UI at an existing conversation; the caller triggers a rerun.
    st.session_state.current_chat_id = chat_id
95
+
96
def convert_to_streamlit_message(msg: BaseMessage):
    """Map a LangChain message to (content, role).

    System prompts are never rendered, so they map to (None, None).
    """
    if isinstance(msg, SystemMessage):
        return None, None
    if isinstance(msg, HumanMessage):
        return msg.content, "user"
    return msg.content, "assistant"
101
+
102
+
103
# --- 6. Sidebar Chat History ---
with st.sidebar:
    # Check if any chat has more than just the system prompt
    has_real_chats = any(
        len(history) > 1 for history in st.session_state.chats.values()
    )

    if not has_real_chats:
        # Show disabled red box instead of New Chat
        st.button("📭 No saved conversations yet", use_container_width=True, disabled=True)

        # Onboarding copy shown only before the first real conversation.
        st.markdown("""
### 👨‍🍳 Welcome!
Ask me anything about cooking:
- Recipes and ideas
- Ingredient substitutions
- Cooking techniques

*Try asking:*
• "What can I make with apples?"
• "How do I cook pasta al dente?"
""")
    else:
        # Always show New Chat button if chats exist
        if st.button("🟥 New Chat", use_container_width=True):
            new_chat()
            st.rerun()

        st.markdown("---")
        st.subheader("📜 Chat History")

        # List past chats; only chats with at least one user/assistant turn
        # are shown (len > 1 excludes system-prompt-only chats).
        for chat_id, title in list(st.session_state.chat_titles.items()):
            if len(st.session_state.chats.get(chat_id, [SYSTEM_PROMPT])) > 1:
                display_title = title
                is_current = chat_id == st.session_state.current_chat_id

                # The active chat is highlighted via the "primary" button style.
                if st.button(
                    display_title,
                    key=f"chat_switch_{chat_id}",
                    type="primary" if is_current else "secondary",
                    use_container_width=True
                ):
                    set_current_chat(chat_id)
                    st.rerun()
148
+
149
# --- Main App Execution ---

# Step 1: Capture user prompt
# --- Conversation Logic: Show user message immediately and stream bot response ---
if prompt := st.chat_input("Ask about a recipe, technique, or substitution..."):
    if CHAT_MODEL is None:
        # Model never initialized: record the prompt plus a canned error reply.
        # st.rerun() aborts this script run, so the code below is not reached.
        st.session_state.chats[st.session_state.current_chat_id].append(HumanMessage(content=prompt))
        st.session_state.chats[st.session_state.current_chat_id].append(
            AIMessage(content="Error: Model is not initialized. Check API key setup.")
        )
        st.rerun()

    # 1. Append user message to the current chat history
    st.session_state.chats[st.session_state.current_chat_id].append(HumanMessage(content=prompt))


    # --- DOMAIN FILTER: Only allow culinary-related queries ---
    # Simple substring allow-list over the lowercased prompt.
    culinary_keywords = [
        # General
        "cook", "cooking", "kitchen", "chef", "meal", "food", "dish", "recipe", "cuisine", "menu", "flavor", "taste",
        # Ingredients
        "ingredient", "spice", "herb", "oil", "salt", "pepper", "garlic", "onion", "tomato", "butter", "cheese",
        "meat", "beef", "pork", "chicken", "lamb", "fish", "seafood", "shrimp", "crab", "lobster",
        "vegetable", "fruit", "grain", "rice", "pasta", "bread", "noodles", "beans", "tofu", "egg",
        # Techniques
        "bake", "roast", "grill", "barbecue", "bbq", "fry", "deep fry", "saute", "sauté", "boil", "steam", "poach",
        "simmer", "stew", "braise", "marinate", "blend", "chop", "slice", "dice", "whisk", "knead", "ferment",
        # Dishes
        "soup", "salad", "sandwich", "burger", "pizza", "pasta", "stew", "curry", "sauce", "stir fry", "omelette",
        "dessert", "cake", "cookie", "pie", "pastry", "bread", "tart", "pudding", "ice cream",
        # Cuisines
        "italian", "french", "spanish", "greek", "mediterranean", "japanese", "chinese", "korean", "thai",
        "vietnamese", "indian", "mexican", "latin", "filipino", "turkish", "middle eastern", "moroccan",
        # Diets & health
        "vegan", "vegetarian", "gluten-free", "keto", "paleo", "halal", "kosher", "low-carb", "low-fat",
        # Beverages
        "coffee", "tea", "smoothie", "wine", "cocktail", "beer", "drink", "juice",
        # Seasonal & Events
        "thanksgiving", "christmas", "new year", "ramadan", "eid", "hanukkah", "valentine", "birthday", "party",
        # Advanced Techniques
        "sous vide", "confit", "smoking", "curing", "pickling", "plating", "molecular gastronomy",
        # Professional Culinary Terms
        "mise en place", "umami", "maillard reaction", "deglaçage", "roux", "stock", "broth",
        # Specialty Ingredients
        "truffle", "saffron", "caviar", "foie gras", "kimchi", "kombu", "nori", "tamarind", "matcha", "miso",
        # Dietary Preferences
        "diabetic-friendly", "heart-healthy", "organic", "sustainable", "farm-to-table",
        # Equipment
        "blender", "mixer", "pressure cooker", "air fryer", "cast iron", "oven", "microwave", "thermometer"
    ]


    # Common cooking question patterns
    # NOTE(review): "what is" matches almost any question, which makes the
    # filter very permissive — confirm this is intended before tightening.
    culinary_phrases = [
        "what can i make with",
        "how do i cook",
        "how to cook",
        "how to make",
        "substitute for",
        "what is",
        "uses of"
    ]

    prompt_lower = prompt.lower()

    # Check both keywords and phrases
    is_culinary = (
        any(word in prompt_lower for word in culinary_keywords) or
        any(phrase in prompt_lower for phrase in culinary_phrases)
    )

    if not is_culinary:
        # Off-topic: store a refusal message instead of calling the model.
        restriction_msg = AIMessage(content="⚠️ I can only answer questions about cooking, recipes, ingredients, or culinary techniques. Please ask something food-related.")
        st.session_state.chats[st.session_state.current_chat_id].append(restriction_msg)
        st.rerun()
    else:
        # Defer generation to the next rerun so the user message renders first.
        st.session_state.generate_next = True
        st.rerun()
227
+
228
+
229
+
230
# Step 2: Generate model response
messages = get_current_messages()

# --- Render chat history first ---
# Brand-new chat: only the system prompt exists, so show a welcome bubble.
if len(messages) == 1 and isinstance(messages[0], SystemMessage):
    message("Start the conversation by typing your first culinary question below!", key="welcome_bubble")

# Index 0 is the system prompt; widget keys must be unique per message.
for i in range(1, len(messages)):
    content, role = convert_to_streamlit_message(messages[i])
    if not content:
        continue

    if role == "assistant":
        # Dress up replies that look like recipes with a header.
        if "recipe" in content.lower():
            message(f"👨‍🍳 **Chef’s Recipe:**\n\n{content}", key=f"chat_ai_{i}")
        else:
            message(f"👨‍🍳 {content}", key=f"chat_ai_{i}")
    elif role == "user":
        message(content, is_user=True, key=f"chat_user_{i}")


# --- Now show spinner / generate new response ---
if st.session_state.generate_next:
    st.session_state.generate_next = False

    full_history = get_current_messages()
    # BUG FIX: capture the user's prompt text BEFORE appending the model
    # reply. full_history aliases the stored chat list, so after append()
    # its last element is the AIMessage — the old code titled chats with
    # the start of the bot's answer instead of the user's question.
    last_user_text = full_history[-1].content

    # Spinner will show at the bottom of the chat
    with st.spinner("👨‍🍳 Our culinary expert is crafting your response..."):
        try:
            ai_message: AIMessage = CHAT_MODEL.invoke(full_history)
            st.session_state.chats[st.session_state.current_chat_id].append(ai_message)

            # Rename untitled chat with the user's first message
            if st.session_state.chat_titles[st.session_state.current_chat_id] == "New Chat":
                st.session_state.chat_titles[st.session_state.current_chat_id] = last_user_text[:30] + "..."
        except Exception as e:
            # Best-effort: surface a friendly message in chat, log the detail.
            error_message = "I'm sorry, I encountered a brief issue while preparing the answer. Please try again."
            st.session_state.chats[st.session_state.current_chat_id].append(AIMessage(content=error_message))
            print(f"Full LLM invocation error: {e}")

    st.rerun()