JerameeUC committed on
Commit
d3c9e13
·
1 Parent(s): 647032a

Fixed what I broke maybe

Browse files
Files changed (1) hide show
  1. app_storefront.py +57 -65
app_storefront.py CHANGED
@@ -3,50 +3,55 @@ import os
3
  import sys
4
  import gradio as gr
5
 
6
- # Make "core/" importable even on Spaces
7
  sys.path.append(os.path.join(os.path.dirname(__file__), "core"))
8
 
9
- # --- New modules ---
10
  from core.model import model_generate, MODEL_NAME
11
  from core.memory import build_prompt_from_history
12
- from core.storefront import (
13
- load_storefront,
14
- storefront_qna,
15
- extract_products,
16
- get_rules,
17
- DEFAULT_PRODUCTS as CORE_DEFAULT_PRODUCTS,
18
- DEFAULT_PARKING as CORE_DEFAULT_PARKING,
19
- DEFAULT_VENUE as CORE_DEFAULT_VENUE,
20
- USE_HELPERS,
21
- STORE_DATA,
22
- )
23
-
24
- # ---------------- Data for tabs (safe fallbacks) ----------------
25
- # Try to load JSON data; if not available, use module defaults.
26
- _loaded = load_storefront() # may be None if JSON absent/empty
27
- if _loaded:
28
- PRODUCTS = extract_products(_loaded) or CORE_DEFAULT_PRODUCTS
29
- RULES = get_rules(_loaded) or {"venue": CORE_DEFAULT_VENUE, "parking": CORE_DEFAULT_PARKING}
 
 
 
 
 
 
 
 
 
 
30
  else:
31
- PRODUCTS = CORE_DEFAULT_PRODUCTS
32
- RULES = {"venue": CORE_DEFAULT_VENUE, "parking": CORE_DEFAULT_PARKING}
 
33
 
34
- DEFAULT_PRODUCTS = PRODUCTS
35
- DEFAULT_VENUE = RULES.get("venue", CORE_DEFAULT_VENUE)
36
- DEFAULT_PARKING = RULES.get("parking", CORE_DEFAULT_PARKING)
37
-
38
- # ---------------- Utilities ----------------
39
  def clean_generation(text: str) -> str:
40
- """Tiny post-process to strip runaway prefixes or trailing artifacts."""
41
  return (text or "").strip()
42
 
43
  # ---------------- Chat logic ----------------
44
  def chat_pipeline(history, message, max_new_tokens=128, temperature=0.8, top_p=0.95):
45
- # 1) Intercept storefront facts first (reduces hallucinations)
46
- sf = storefront_qna(message)
47
  if sf:
48
  return sf
49
- # 2) Memory-aware prompt for the model
50
  prompt = build_prompt_from_history(history, message, k=4)
51
  gen = model_generate(prompt, max_new_tokens, temperature, top_p)
52
  return clean_generation(gen)
@@ -62,24 +67,17 @@ CSS = """
62
  with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
63
  gr.Markdown("## Storefront Chat")
64
 
65
- # Single source of truth for conversation history
66
  history_state = gr.State([])
67
 
68
  with gr.Tabs():
69
  # --- TAB: Chat ---
70
  with gr.TabItem("Chat"):
71
  with gr.Group(elem_classes=["panel"]):
72
- chat = gr.Chatbot(
73
- height=360,
74
- bubble_full_width=False,
75
- label="Chat"
76
- )
77
 
78
  with gr.Row():
79
- msg = gr.Textbox(
80
- placeholder="Ask about parking rules, attire, cap & gown, pickup times…",
81
- scale=5
82
- )
83
  send = gr.Button("Send", scale=1)
84
 
85
  # Quick chips
@@ -89,7 +87,7 @@ with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
89
  chip3 = gr.Button("Attire", variant="secondary")
90
  chip4 = gr.Button("When do lots open?", variant="secondary")
91
 
92
- # Advanced (sliders + health/capabilities) — kept off the main screen
93
  with gr.Accordion("Advanced chat options", open=False):
94
  max_new = gr.Slider(32, 512, 128, 1, label="Max new tokens")
95
  temp = gr.Slider(0.1, 1.5, 0.8, 0.05, label="Temperature")
@@ -103,16 +101,16 @@ with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
103
  # --- TAB: Products ---
104
  with gr.TabItem("Products"):
105
  gr.Markdown("### Available Items")
106
- cols = list(DEFAULT_PRODUCTS[0].keys()) if DEFAULT_PRODUCTS else ["SKU","Name","Price","Notes"]
107
- data = [[p.get(c, "") for c in cols] for p in DEFAULT_PRODUCTS]
108
- gr.Dataframe(headers=cols, value=data, interactive=False, wrap=True, label="Products")
109
 
110
  # --- TAB: Rules ---
111
  with gr.TabItem("Rules"):
112
  gr.Markdown("### Venue rules")
113
- gr.Markdown("- " + "\n- ".join(DEFAULT_VENUE))
114
  gr.Markdown("### Parking rules")
115
- gr.Markdown("- " + "\n- ".join(DEFAULT_PARKING))
116
 
117
  # --- TAB: Logistics ---
118
  with gr.TabItem("Logistics"):
@@ -127,7 +125,6 @@ with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
127
 
128
  # ---------- Helpers ----------
129
  def _append_bot_md(history, md_text):
130
- """Append a bot-side markdown message while preserving the [[u,b],…] format."""
131
  history = history or []
132
  return history + [[None, md_text]]
133
 
@@ -135,49 +132,44 @@ with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
135
  def on_send(history, message, max_new_tokens, temperature, top_p):
136
  t = (message or "").strip()
137
  if not t:
138
- return history, history, "" # no-op; keep shapes identical
139
- # add user row
140
  history = (history or []) + [[t, None]]
141
- # generate reply using history prior to this turn
142
  reply = chat_pipeline(history[:-1], t, max_new_tokens, temperature, top_p)
143
  history[-1][1] = reply
144
- # update both state and Chatbot, clear textbox
145
  return history, history, ""
146
 
147
  def _health_cb(history):
148
  md = (
149
  f"### Status: ✅ Healthy\n"
150
  f"- Model: `{MODEL_NAME}`\n"
151
- f"- Storefront module: {'yes' if USE_HELPERS else 'no'}\n"
152
- f"- Storefront JSON: {'loaded' if bool(STORE_DATA) else 'not found'}"
153
  )
154
  new_hist = _append_bot_md(history, md)
155
- # Update chat AND status label
156
  return new_hist, new_hist, "Status: ✅ Healthy"
157
 
158
  def _caps_cb(history):
159
- caps = [
160
- "Chat (LLM text-generation, memory-aware prompt)",
161
- "Storefront Q&A (parking, attire, products, logistics)",
162
- "Adjustable: max_new_tokens, temperature, top-p",
163
- ]
164
- md = "### Capabilities\n- " + "\n- ".join(caps)
165
  new_hist = _append_bot_md(history, md)
166
  return new_hist, new_hist
167
 
168
- # Wire up (update both the state and Chatbot widget)
169
  send.click(on_send, [history_state, msg, max_new, temp, topp], [history_state, chat, msg])
170
  msg.submit(on_send, [history_state, msg, max_new, temp, topp], [history_state, chat, msg])
171
 
172
- # Chips → prefill textbox (don’t mutate history)
173
  chip1.click(lambda: "What are the parking rules?", outputs=msg)
174
  chip2.click(lambda: "Can I buy multiple parking passes?", outputs=msg)
175
  chip3.click(lambda: "Is formal attire required?", outputs=msg)
176
  chip4.click(lambda: "What time do the parking lots open?", outputs=msg)
177
 
178
- # Health / Capabilities: now inside the Advanced accordion
179
  health_btn.click(_health_cb, inputs=[history_state], outputs=[history_state, chat, status_md])
180
- caps_btn.click(_caps_cb, inputs=[history_state], outputs=[history_state, chat])
181
 
182
  if __name__ == "__main__":
183
  demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", "7860")))
 
3
  import sys
4
  import gradio as gr
5
 
6
+ # Ensure "core/" is importable
7
  sys.path.append(os.path.join(os.path.dirname(__file__), "core"))
8
 
9
+ # Import only functions; core.storefront doesn't export constants
10
  from core.model import model_generate, MODEL_NAME
11
  from core.memory import build_prompt_from_history
12
+ from core.storefront import load_storefront, storefront_qna, extract_products, get_rules
13
+
14
+ # ---------------- Load data + safe fallbacks ----------------
15
+ DATA = load_storefront() # may be None if storefront_data.json missing/empty
16
+
17
+ # Fallbacks used if JSON not present
18
+ FALLBACK_PRODUCTS = [
19
+ {"sku": "CG-SET", "name": "Cap & Gown Set", "price": 59.00,
20
+ "notes": "Tassel included; ships until 10 days before the event"},
21
+ {"sku": "PK-1", "name": "Parking Pass", "price": 10.00,
22
+ "notes": "Multiple passes are allowed per student"}
23
+ ]
24
+ FALLBACK_VENUE = [
25
+ "Formal attire recommended (not required).",
26
+ "No muscle shirts.",
27
+ "No sagging pants."
28
+ ]
29
+ FALLBACK_PARKING = [
30
+ "No double parking.",
31
+ "Vehicles parked in handicap spaces will be towed."
32
+ ]
33
+
34
+ # Normalize products/rules for the tabs
35
+ if DATA:
36
+ PRODUCTS = extract_products(DATA) or FALLBACK_PRODUCTS
37
+ venue_rules, parking_rules = get_rules(DATA)
38
+ VENUE_RULES = venue_rules or FALLBACK_VENUE
39
+ PARKING_RULES = parking_rules or FALLBACK_PARKING
40
  else:
41
+ PRODUCTS = FALLBACK_PRODUCTS
42
+ VENUE_RULES = FALLBACK_VENUE
43
+ PARKING_RULES = FALLBACK_PARKING
44
 
 
 
 
 
 
45
  def clean_generation(text: str) -> str:
 
46
  return (text or "").strip()
47
 
48
  # ---------------- Chat logic ----------------
49
  def chat_pipeline(history, message, max_new_tokens=128, temperature=0.8, top_p=0.95):
50
+ # 1) Use storefront facts first (reduces hallucinations)
51
+ sf = storefront_qna(DATA, message) # <-- pass DATA!
52
  if sf:
53
  return sf
54
+ # 2) Memory-aware prompt to keep context grounded
55
  prompt = build_prompt_from_history(history, message, k=4)
56
  gen = model_generate(prompt, max_new_tokens, temperature, top_p)
57
  return clean_generation(gen)
 
67
  with gr.Blocks(title="Storefront Chat", css=CSS) as demo:
68
  gr.Markdown("## Storefront Chat")
69
 
70
+ # Single history state (kept in sync with Chatbot)
71
  history_state = gr.State([])
72
 
73
  with gr.Tabs():
74
  # --- TAB: Chat ---
75
  with gr.TabItem("Chat"):
76
  with gr.Group(elem_classes=["panel"]):
77
+ chat = gr.Chatbot(height=360, bubble_full_width=False, label="Chat")
 
 
 
 
78
 
79
  with gr.Row():
80
+ msg = gr.Textbox(placeholder="Ask about parking rules, attire, cap & gown, pickup times…", scale=5)
 
 
 
81
  send = gr.Button("Send", scale=1)
82
 
83
  # Quick chips
 
87
  chip3 = gr.Button("Attire", variant="secondary")
88
  chip4 = gr.Button("When do lots open?", variant="secondary")
89
 
90
+ # Advanced options (sliders + Health/Capabilities)
91
  with gr.Accordion("Advanced chat options", open=False):
92
  max_new = gr.Slider(32, 512, 128, 1, label="Max new tokens")
93
  temp = gr.Slider(0.1, 1.5, 0.8, 0.05, label="Temperature")
 
101
  # --- TAB: Products ---
102
  with gr.TabItem("Products"):
103
  gr.Markdown("### Available Items")
104
+ cols = ["sku", "name", "price", "notes"]
105
+ data = [[p.get(c, "") for c in cols] for p in PRODUCTS]
106
+ gr.Dataframe(headers=[c.upper() for c in cols], value=data, interactive=False, wrap=True, label="Products")
107
 
108
  # --- TAB: Rules ---
109
  with gr.TabItem("Rules"):
110
  gr.Markdown("### Venue rules")
111
+ gr.Markdown("- " + "\n- ".join(VENUE_RULES))
112
  gr.Markdown("### Parking rules")
113
+ gr.Markdown("- " + "\n- ".join(PARKING_RULES))
114
 
115
  # --- TAB: Logistics ---
116
  with gr.TabItem("Logistics"):
 
125
 
126
  # ---------- Helpers ----------
127
  def _append_bot_md(history, md_text):
 
128
  history = history or []
129
  return history + [[None, md_text]]
130
 
 
132
  def on_send(history, message, max_new_tokens, temperature, top_p):
133
  t = (message or "").strip()
134
  if not t:
135
+ return history, history, "" # no-op; shapes must match
 
136
  history = (history or []) + [[t, None]]
 
137
  reply = chat_pipeline(history[:-1], t, max_new_tokens, temperature, top_p)
138
  history[-1][1] = reply
 
139
  return history, history, ""
140
 
141
  def _health_cb(history):
142
  md = (
143
  f"### Status: ✅ Healthy\n"
144
  f"- Model: `{MODEL_NAME}`\n"
145
+ f"- Storefront JSON: {'loaded' if bool(DATA) else 'not found'}"
 
146
  )
147
  new_hist = _append_bot_md(history, md)
 
148
  return new_hist, new_hist, "Status: ✅ Healthy"
149
 
150
  def _caps_cb(history):
151
+ md = (
152
+ "### Capabilities\n"
153
+ "- Chat (LLM text-generation, memory-aware prompt)\n"
154
+ "- Storefront Q&A (parking, attire, products, logistics)\n"
155
+ "- Adjustable: max_new_tokens, temperature, top-p"
156
+ )
157
  new_hist = _append_bot_md(history, md)
158
  return new_hist, new_hist
159
 
160
+ # Wire up (state + chatbot)
161
  send.click(on_send, [history_state, msg, max_new, temp, topp], [history_state, chat, msg])
162
  msg.submit(on_send, [history_state, msg, max_new, temp, topp], [history_state, chat, msg])
163
 
164
+ # Chips → prefill textbox
165
  chip1.click(lambda: "What are the parking rules?", outputs=msg)
166
  chip2.click(lambda: "Can I buy multiple parking passes?", outputs=msg)
167
  chip3.click(lambda: "Is formal attire required?", outputs=msg)
168
  chip4.click(lambda: "What time do the parking lots open?", outputs=msg)
169
 
170
+ # Health / Capabilities live inside Advanced
171
  health_btn.click(_health_cb, inputs=[history_state], outputs=[history_state, chat, status_md])
172
+ caps_btn.click(_caps_cb, inputs=[history_state], outputs=[history_state, chat])
173
 
174
  if __name__ == "__main__":
175
  demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", "7860")))