Fred808 committed on
Commit
97ff82b
·
verified ·
1 Parent(s): e6cea55

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -3
app.py CHANGED
@@ -364,14 +364,14 @@ async def chatbot_response(request: Request, background_tasks: BackgroundTasks):
364
  elif sentiment_score > 0.3:
365
  sentiment_modifier = "Great to hear from you! "
366
 
367
- # Check if this is an order flow request
 
368
  order_response = process_order_flow(user_id, user_message)
369
  if order_response:
370
  background_tasks.add_task(log_chat_to_db, user_id, "outbound", order_response)
371
  return JSONResponse(content={"response": sentiment_modifier + order_response})
372
 
373
-
374
- # Check for specialized commands:
375
  if "menu" in user_message.lower():
376
  # Return menu with images and options for selection
377
  menu_with_images = []
@@ -396,6 +396,52 @@ async def chatbot_response(request: Request, background_tasks: BackgroundTasks):
396
  background_tasks.add_task(log_chat_to_db, user_id, "outbound", str(response_payload))
397
  return JSONResponse(content=response_payload)
398
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
399
 
400
 
401
  # Handle dish selection for ordering
 
364
  elif sentiment_score > 0.3:
365
  sentiment_modifier = "Great to hear from you! "
366
 
367
+ # ========== REORDERED LOGIC ==========
368
+ # 1. Check if the user is already in an order flow
369
  order_response = process_order_flow(user_id, user_message)
370
  if order_response:
371
  background_tasks.add_task(log_chat_to_db, user_id, "outbound", order_response)
372
  return JSONResponse(content={"response": sentiment_modifier + order_response})
373
 
374
+ # 2. Handle menu display
 
375
  if "menu" in user_message.lower():
376
  # Return menu with images and options for selection
377
  menu_with_images = []
 
396
  background_tasks.add_task(log_chat_to_db, user_id, "outbound", str(response_payload))
397
  return JSONResponse(content=response_payload)
398
 
399
+ # 3. Handle dish selection (ONLY if not in order flow)
400
+ if any(item["name"].lower() in user_message.lower() for item in menu_items) or \
401
+ any(str(index) == user_message.strip() for index, item in enumerate(menu_items, start=1)):
402
+ # Extract the selected dish
403
+ selected_dish = None
404
+ if user_message.strip().isdigit():
405
+ # User selected by number
406
+ dish_number = int(user_message.strip())
407
+ if 1 <= dish_number <= len(menu_items):
408
+ selected_dish = menu_items[dish_number - 1]["name"]
409
+ else:
410
+ # User selected by name
411
+ for item in menu_items:
412
+ if item["name"].lower() in user_message.lower():
413
+ selected_dish = item["name"]
414
+ break
415
+
416
+ if selected_dish:
417
+ # Trigger the order flow
418
+ user_state[user_id] = {"flow": "order", "step": 1, "data": {"dish": selected_dish}, "last_active": datetime.utcnow()}
419
+ response_text = f"You selected {selected_dish}. How many servings would you like?"
420
+ background_tasks.add_task(log_chat_to_db, user_id, "outbound", response_text)
421
+ return JSONResponse(content={"response": sentiment_modifier + response_text})
422
+ else:
423
+ response_text = "Sorry, I couldn't find that dish in the menu. Please try again."
424
+ background_tasks.add_task(log_chat_to_db, user_id, "outbound", response_text)
425
+ return JSONResponse(content={"response": sentiment_modifier + response_text})
426
+
427
+ # 4. Handle nutritional facts
428
+ if "nutritional facts for" in user_message.lower():
429
+ dish_name = user_message.lower().replace("nutritional facts for", "").strip().title()
430
+ dish = next((item for item in menu_items if item["name"].lower() == dish_name.lower()), None)
431
+ if dish:
432
+ response_text = f"Nutritional facts for {dish['name']}:\n{dish['nutrition']}"
433
+ else:
434
+ response_text = f"Sorry, I couldn't find nutritional facts for {dish_name}."
435
+ background_tasks.add_task(log_chat_to_db, user_id, "outbound", response_text)
436
+ return JSONResponse(content={"response": sentiment_modifier + response_text})
437
+
438
+ # 5. Fallback: use NVIDIA text LLM streaming for a response
439
+ prompt = f"User query: {user_message}\nGenerate a helpful, personalized response for a restaurant chatbot."
440
+ def stream_response():
441
+ for chunk in stream_text_completion(prompt):
442
+ yield chunk
443
+ background_tasks.add_task(log_chat_to_db, user_id, "outbound", f"LLM fallback response for prompt: {prompt}")
444
+ return StreamingResponse(stream_response(), media_type="text/plain")
445
 
446
 
447
  # Handle dish selection for ordering