import os
import re
import json
import sqlite3
from typing import Any, Optional, Dict
import streamlit as st

from langchain_groq import ChatGroq
from langchain_core.prompts import ChatPromptTemplate

# ============================================================
# CONFIG
# ============================================================
DB_PATH = "customer_orders.db"
MODEL_NAME = "meta-llama/llama-4-scout-17b-16e-instruct"

# ============================================================
# LLM INIT
# ============================================================
@st.cache_resource
def init_llm():
    groq_key = os.getenv("GROQ_API_KEY")
    if not groq_key:
        st.error("Missing GROQ_API_KEY. Add it in Hugging Face β†’ Settings β†’ Secrets.")
        st.stop()

    os.environ["GROQ_API_KEY"] = groq_key

    return ChatGroq(
        model=MODEL_NAME,
        temperature=0,
        max_tokens=400,
        timeout=30,
        max_retries=2,
    )

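# Create the Groq client once; @st.cache_resource reuses it across Streamlit reruns.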
llm = init_llm()

# ============================================================
# HELPERS & LOGIC
# ============================================================
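# Order IDs look like the letter "O" followed by digits (e.g. O12486); matching is case-insensitive.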
ORDER_ID_HELPER_PATTERN = re.compile(r"\bO\d+\b", re.IGNORECASE)

def try_extract_order_id(text: str) -> Optional[str]:
    if not text:
        return None
    m = ORDER_ID_HELPER_PATTERN.search(text)
    return m.group(0).upper() if m else None


def fetch_order_as_json(order_id: str) -> Dict[str, Any]:
    """
    βœ… FINAL FIX
    Deterministic, read-only SQLite fetch.
    Works the same in Colab and Hugging Face.
    """
    try:
        conn = sqlite3.connect(DB_PATH)
    except sqlite3.Error:
        return {"_error": "DB_ERROR"}

    try:
        conn.row_factory = sqlite3.Row
        cur = conn.cursor()
        cur.execute(
            "SELECT * FROM orders WHERE order_id = ? LIMIT 1;",
            (order_id,)
        )
        row = cur.fetchone()

        if row is None:
            return {"_error": "NOT_FOUND"}

        return dict(row)

    except sqlite3.Error:
        # A broken or unreadable database is not the same as a missing order.
        return {"_error": "DB_ERROR"}

    finally:
        # Always release the connection, even when the query fails.
        conn.close()


def chat_agent_response(user_query: str, session_order_id: str) -> str:
    oid = session_order_id or try_extract_order_id(user_query)

    if not oid:
        return "I’d be happy to help. Could you please share your Order ID (example: O12486)?"

    order_data = fetch_order_as_json(oid)

    if order_data.get("_error") == "NOT_FOUND":
        return f"I'm sorry β€” I couldn't find any details for Order ID **{oid}**. Please re-check and try again."

    # Privacy guardrail
    if "cust_id" in order_data:
        order_data["cust_id"] = "[redacted]"

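    # Build the prompt: fixed support-agent instructions plus the user query and the order JSON.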
    prompt = ChatPromptTemplate.from_messages([
        ("system",
         "You are FoodHub customer support.\n"
         "Write a polite, concise reply (2–4 lines).\n"
         "Use ONLY the provided order context.\n"
         "Do NOT expose customer identifiers.\n"),
        ("human",
         "User query:\n{user_query}\n\n"
         "Order context (JSON):\n{context}\n\n"
         "Final reply:")
    ])

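    # Pipe the formatted prompt into the model and read back the reply text.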
    chain = prompt | llm
    resp = chain.invoke({
        "user_query": user_query,
        "context": json.dumps(order_data)
    })

    return getattr(resp, "content", str(resp))


# ============================================================
# STREAMLIT UI
# ============================================================
st.set_page_config(page_title="FoodHub Bot", page_icon="🍔")
st.title("🍔 FoodHub Support AI")

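# Initialise chat history and the remembered Order ID on first load.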
if "messages" not in st.session_state:
    st.session_state.messages = []
if "order_id" not in st.session_state:
    st.session_state.order_id = ""

# Show chat history
for msg in st.session_state.messages:
    with st.chat_message(msg["role"]):
        st.markdown(msg["content"])

# User input
user_text = st.chat_input("How can I help with your order?")

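# Handle a new user turn: record it, detect an Order ID, generate a reply, and store it.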
if user_text:
    st.session_state.messages.append({"role": "user", "content": user_text})
    with st.chat_message("user"):
        st.markdown(user_text)

    detected = try_extract_order_id(user_text)
    if detected:
        st.session_state.order_id = detected

    with st.spinner("Checking FoodHub records..."):
        response = chat_agent_response(user_text, st.session_state.order_id)

    st.session_state.messages.append({"role": "assistant", "content": response})
    with st.chat_message("assistant"):
        st.markdown(response)