# FoodHub support chatbot — Streamlit app served as a Hugging Face Space.
import os
import re
import json
import sqlite3
from typing import Any, Optional, Dict

import streamlit as st
from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate

# ============================================================
# CONFIG
# ============================================================
# SQLite database holding the "orders" table; opened read-only by the app's
# own convention (queries below are SELECT-only).
DB_PATH = "customer_orders.db"
# Groq-hosted model used to draft support replies.
MODEL_NAME = "meta-llama/llama-4-scout-17b-16e-instruct"
# ============================================================
# LLM INIT
# ============================================================
def init_llm() -> ChatGroq:
    """Create the Groq chat client, halting the Streamlit app if the key is missing.

    Reads GROQ_API_KEY from the environment; on absence it surfaces an error in
    the UI and stops the script via st.stop() (raises, never returns).

    Returns:
        A configured ChatGroq client (deterministic: temperature=0).
    """
    groq_key = os.getenv("GROQ_API_KEY")
    if not groq_key:
        # FIX: the arrows in this message were mojibake ("β") in the original.
        st.error("Missing GROQ_API_KEY. Add it in Hugging Face → Settings → Secrets.")
        st.stop()
    # Re-export so the langchain/groq client picks the key up from the env.
    os.environ["GROQ_API_KEY"] = groq_key
    return ChatGroq(
        model=MODEL_NAME,
        temperature=0,
        max_tokens=400,
        timeout=30,
        max_retries=2,
    )
# Module-level LLM client; init_llm() stops the whole app if the key is absent.
llm = init_llm()
# ============================================================
# HELPERS & LOGIC
# ============================================================
# Order IDs look like "O" followed by digits (any case), e.g. O12486.
ORDER_ID_HELPER_PATTERN = re.compile(r"\bO\d+\b", re.IGNORECASE)


def try_extract_order_id(text: str) -> Optional[str]:
    """Return the first order ID found in *text*, upper-cased, or None."""
    if not text:
        return None
    match = ORDER_ID_HELPER_PATTERN.search(text)
    if match is None:
        return None
    return match.group(0).upper()
def fetch_order_as_json(order_id: str, db_path: Optional[str] = None) -> Dict[str, Any]:
    """Deterministic, read-only SQLite fetch of a single order row.

    Args:
        order_id: Normalized order ID (e.g. "O12486").
        db_path: Optional override of the database file; defaults to DB_PATH.
            (Backward-compatible addition — existing callers are unchanged.)

    Returns:
        The order row as a plain dict, or {"_error": "NOT_FOUND"} when the ID
        does not exist or the database cannot be read.  Callers key off the
        "_error" sentinel, so DB failures intentionally map to NOT_FOUND too.
    """
    path = DB_PATH if db_path is None else db_path
    conn = None
    try:
        conn = sqlite3.connect(path)
        conn.row_factory = sqlite3.Row
        # Parameterized query — order_id comes from user input.
        row = conn.execute(
            "SELECT * FROM orders WHERE order_id = ? LIMIT 1;",
            (order_id,),
        ).fetchone()
        if row is None:
            return {"_error": "NOT_FOUND"}
        return dict(row)
    except sqlite3.Error:
        # FIX: narrowed from bare `except Exception` so programming errors
        # elsewhere aren't silently swallowed as "order not found".
        return {"_error": "NOT_FOUND"}
    finally:
        # FIX: original leaked the connection when execute() raised.
        if conn is not None:
            conn.close()
def chat_agent_response(user_query: str, session_order_id: str) -> str:
    """Build a grounded support reply for one user turn.

    Order-ID resolution: the sticky session value wins, falling back to a
    best-effort extraction from the query text.  The LLM only ever sees the
    (redacted) JSON of a single order row.

    Args:
        user_query: Raw text the user typed this turn.
        session_order_id: Order ID remembered from earlier turns ("" if none).

    Returns:
        A short assistant reply (Markdown string).
    """
    oid = session_order_id or try_extract_order_id(user_query)
    if not oid:
        # FIX: apostrophe was mojibake ("Iβd") in the original.
        return "I'd be happy to help. Could you please share your Order ID (example: O12486)?"
    order_data = fetch_order_as_json(oid)
    if order_data.get("_error") == "NOT_FOUND":
        # FIX: dash was mojibake ("β") in the original.
        return f"I'm sorry — I couldn't find any details for Order ID **{oid}**. Please re-check and try again."
    # Privacy guardrail: never let the raw customer identifier reach the model.
    if "cust_id" in order_data:
        order_data["cust_id"] = "[redacted]"
    prompt = ChatPromptTemplate.from_messages([
        ("system",
         "You are FoodHub customer support.\n"
         "Write a polite, concise reply (2–4 lines).\n"  # FIX: "2β4" mojibake
         "Use ONLY the provided order context.\n"
         "Do NOT expose customer identifiers.\n"),
        ("human",
         "User query:\n{user_query}\n\n"
         "Order context (JSON):\n{context}\n\n"
         "Final reply:")
    ])
    chain = prompt | llm
    resp = chain.invoke({
        "user_query": user_query,
        "context": json.dumps(order_data),
    })
    # LangChain chat models return a message object; fall back to str() for safety.
    return getattr(resp, "content", str(resp))
# ============================================================
# STREAMLIT UI
# ============================================================
# NOTE(review): "π" below looks like a mojibake'd emoji (🍔?) — confirm intended icon.
st.set_page_config(page_title="FoodHub Bot", page_icon="π")
st.title("π FoodHub Support AI")

# One-time session initialization.
st.session_state.setdefault("messages", [])
st.session_state.setdefault("order_id", "")

# Replay the conversation so far.
for entry in st.session_state.messages:
    with st.chat_message(entry["role"]):
        st.markdown(entry["content"])

# Handle a new user turn.
if prompt_text := st.chat_input("How can I help with your order?"):
    st.session_state.messages.append({"role": "user", "content": prompt_text})
    with st.chat_message("user"):
        st.markdown(prompt_text)

    # Remember the most recent order ID mentioned, if any.
    if found_id := try_extract_order_id(prompt_text):
        st.session_state.order_id = found_id

    with st.spinner("Checking FoodHub records..."):
        reply = chat_agent_response(prompt_text, st.session_state.order_id)

    st.session_state.messages.append({"role": "assistant", "content": reply})
    with st.chat_message("assistant"):
        st.markdown(reply)