ZENLLC committed on
Commit
13db0a0
·
verified ·
1 Parent(s): 42ad35c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +96 -72
app.py CHANGED
@@ -6,146 +6,170 @@ Skills
6
  • /math <expr> — safe calculator
7
  • /summarize <text> — 2-sentence TL;DR
8
  • /translate_es <text> — English → Spanish
9
- • /ascii <text> — big FIGlet ASCII art
10
- • Any question ending with “?” → live Wikipedia answer
11
- Model: facebook/blenderbot-400M-distill (public, ~720 MB weights)
 
 
 
12
  """
13
 
14
  import ast, math, re, gc, traceback, torch, wikipedia, pyfiglet
15
  import gradio as gr
16
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
17
 
18
- # ──────────────────────────────────────────────────────────────────────────────
19
  # 0 · helpers
20
- # ──────────────────────────────────────────────────────────────────────────────
21
- def err(e: Exception) -> str:
22
- print("\n=== ZEN-Bot ERROR ===")
23
  traceback.print_exc()
24
- print("====================\n")
25
  return f"⚠️ {type(e).__name__}: {e}"
26
 
27
- # ──────────────────────────────────────────────────────────────────────────────
28
- # 1 · model (fits free CPU tier)
29
- # ──────────────────────────────────────────────────────────────────────────────
30
- MODEL_NAME = "facebook/blenderbot-400M-distill" # always public
31
 
32
- tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
33
- model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME)
34
  model.eval(); torch.set_grad_enabled(False)
35
 
36
- GEN_KW = dict(
37
- max_new_tokens = 64,
38
- do_sample = False, # fast & deterministic
39
- )
40
 
41
  def llm(prompt: str) -> str:
 
42
  try:
43
- ids_in = tokenizer(prompt, return_tensors="pt",
44
- truncation=True, max_length=1024).input_ids
 
 
45
  with torch.no_grad():
46
- ids_out = model.generate(ids_in, **GEN_KW)
47
- reply_ids = ids_out[0, ids_in.shape[-1]:]
48
- return tokenizer.decode(reply_ids, skip_special_tokens=True).strip()
 
49
  except Exception as e:
50
- return err(e)
51
 
52
- # ──────────────────────────────────────────────────────────────────────────────
53
- # 2 · /math (safe)
54
- # ──────────────────────────────────────────────────────────────────────────────
55
- _ALLOWED = {k: getattr(math, k) for k in dir(math) if not k.startswith("__")}
56
  _ALLOWED.update({"abs": abs, "round": round})
57
 
58
  def safe_math(expr: str) -> str:
59
  try:
60
  node = ast.parse(expr, mode="eval")
 
61
  def ok(n):
62
- if isinstance(n, ast.Num): return True
63
- if isinstance(n, ast.UnaryOp): return ok(n.operand)
64
- if isinstance(n, ast.BinOp): return ok(n.left) and ok(n.right)
65
- if isinstance(n, ast.Call):
66
- return (isinstance(n.func, ast.Name)
67
- and n.func.id in _ALLOWED
68
- and all(ok(a) for a in n.args))
69
- return False
 
 
70
  if not ok(node.body):
71
- return "⛔️ Expression not allowed."
72
  return str(eval(compile(node, "<expr>", "eval"),
73
  {"__builtins__": {}}, _ALLOWED))
74
  except Exception as e:
75
- return err(e)
76
 
77
- # ──────────────────────────────────────────────────────────────────────────────
78
- # 3 · Wikipedia Q&A
79
- # ──────────────────────────────────────────────────────────────────────────────
80
- def wiki_answer(q: str) -> str | None:
 
 
 
81
  try:
 
 
 
 
 
82
  wikipedia.set_lang("en")
83
- page = wikipedia.page(q, auto_suggest=True, redirect=True)
84
- summ = wikipedia.summary(page.title, sentences=3, auto_suggest=False)
85
- return f"**{page.title}** — {summ}"
 
 
 
86
  except (wikipedia.DisambiguationError, wikipedia.PageError):
87
- return None
88
  except Exception as e:
89
- return err(e)
90
 
91
- # ──────────────────────────────────────────────────────────────────────────────
92
  # 4 · ASCII art
93
- # ──────────────────────────────────────────────────────────────────────────────
94
  def ascii_art(txt: str) -> str:
95
- try: return pyfiglet.figlet_format(txt, width=120)
96
- except Exception as e: return err(e)
 
 
97
 
98
- # ──────────────────────────────────────────────────────────────────────────────
99
- # 5 · main router
100
- # ──────────────────────────────────────────────────────────────────────────────
101
  CMD = re.compile(r"^/(math|summarize|translate_es|ascii)\s+(.+)", re.S | re.I)
102
 
103
- def respond(msg: str, hist: list[list[str, str]]) -> str:
104
- # 5-A · commands
105
- m = CMD.match(msg.strip())
106
- if m:
 
 
 
107
  cmd, body = m.group(1).lower(), m.group(2).strip()
108
  if cmd == "math": return safe_math(body)
109
  if cmd == "ascii": return ascii_art(body)
110
- if cmd == "summarize": return llm(f"Summarize in two concise sentences:\n\n{body}\n\nSummary:")
111
  if cmd == "translate_es": return llm(f"Translate into Spanish (natural):\n\n{body}\n\nSpanish:")
112
 
113
- # 5-B · Wikipedia for factual questions
114
- if msg.strip().endswith("?") and len(msg.split()) > 2:
115
- wiki = wiki_answer(msg)
116
  if wiki: return wiki
117
 
118
- # 5-C · normal chat (keep last 6 turns)
119
- prompt = "You are ZEN-Bot, a concise, helpful tutor for young AI pioneers.\n\n"
120
- for u, b in hist[-6:]:
121
  prompt += f"User: {u}\nAssistant: {b}\n"
122
- prompt += f"User: {msg}\nAssistant:"
123
  return llm(prompt)
124
 
125
- # ──────────────────────────────────────────────────────────────────────────────
126
  # 6 · Gradio UI
127
- # ──────────────────────────────────────────────────────────────────────────────
128
  demo = gr.ChatInterface(
129
  fn = respond,
130
  title = "🚀 ZEN-Bot Ultimate (Key-Free)",
131
  description = (
132
  "**Commands** \n"
133
- "• normal chat \n"
134
  "• `/math 2**5 / (sin(0.5)+1)` \n"
135
  "• `/summarize <text>` \n"
136
  "• `/translate_es Hello!` \n"
137
  "• `/ascii ZEN` \n"
138
- "• Ask any factual question ending with `?` for live Wikipedia answer"
139
  ),
140
- theme = "soft",
141
- fill_height = True,
142
  examples = [
143
  "Who discovered penicillin?",
144
  "/ascii AI ROCKS",
145
  "/math sqrt(144)+log(100,10)",
146
  "/summarize The Industrial Revolution began in Britain...",
147
  "/translate_es Good evening, friends!",
 
148
  ],
 
 
149
  )
150
 
151
  if __name__ == "__main__":
 
6
  • /math <expr> — safe calculator
7
  • /summarize <text> — 2-sentence TL;DR
8
  • /translate_es <text> — English → Spanish
9
+ • /ascii <text> — FIGlet ASCII art
10
+ • Any question ending with “?” → live Wikipedia answer
11
+ remembers the last topic, so “Who discovered it?” now works.
12
+ Model: facebook/blenderbot-400M-distill (~720 MB, public)
13
+
14
+ Copy-paste this file into your Space, keep the same requirements.txt, commit, done.
15
  """
16
 
17
  import ast, math, re, gc, traceback, torch, wikipedia, pyfiglet
18
  import gradio as gr
19
  from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
20
 
21
+ # ────────────────────────────────────────────────────────────────
22
  # 0 · helpers
23
+ # ────────────────────────────────────────────────────────────────
24
def log_err(e: Exception) -> str:
    """Dump the active traceback to stdout between banner lines and
    return a short ⚠️ summary suitable for showing in the chat UI."""
    summary = f"⚠️ {type(e).__name__}: {e}"
    print("=== ZEN-Bot ERROR =====================")
    traceback.print_exc()
    print("=======================================\n")
    return summary
29
 
30
+ # ────────────────────────────────────────────────────────────────
31
+ # 1 · model (public, small enough for free CPU/basic)
32
+ # ────────────────────────────────────────────────────────────────
33
MODEL = "facebook/blenderbot-400M-distill"   # public checkpoint, small enough for the free CPU tier

# Load once at import time; this Space does inference only, so disable
# autograd globally and put the model in eval mode.
tokenizer = AutoTokenizer.from_pretrained(MODEL)
model = AutoModelForSeq2SeqLM.from_pretrained(MODEL)
model.eval()
torch.set_grad_enabled(False)

# Greedy decoding (no sampling) keeps replies fast and deterministic.
GEN_ARGS = {"max_new_tokens": 64, "do_sample": False}
 
 
 
40
 
41
def llm(prompt: str) -> str:
    """Generate a reply with BlenderBot-distill.

    Tokenizes *prompt* (truncated to 1024 tokens), runs greedy generation,
    and returns the decoded text. Any runtime error is logged via
    log_err() and returned as a short ⚠️ string instead of crashing the UI.
    """
    try:
        inputs = tokenizer(
            prompt, return_tensors="pt",
            truncation=True, max_length=1024
        ).input_ids
        with torch.no_grad():
            out = model.generate(inputs, **GEN_ARGS)
        # BUGFIX: BlenderBot is an encoder-decoder model, so `generate`
        # returns *decoder* token ids only. Slicing off the first
        # `inputs.shape[-1]` ids (the causal-LM recipe) chopped the reply
        # to nothing whenever the prompt was longer than the answer.
        return tokenizer.decode(out[0], skip_special_tokens=True).strip()
    except Exception as e:
        return log_err(e)
55
 
56
+ # ────────────────────────────────────────────────────────────────
57
+ # 2 · safe /math
58
+ # ────────────────────────────────────────────────────────────────
59
# Whitelist: every public name from math (functions AND constants such as
# pi, e, tau, inf) plus the two harmless builtins abs/round.
_ALLOWED = {k: getattr(math, k) for k in dir(math) if not k.startswith("_")}
_ALLOWED.update({"abs": abs, "round": round})

def safe_math(expr: str) -> str:
    """Safely evaluate an arithmetic expression and return the result as a string.

    The AST is validated first: only numeric literals, unary/binary
    operators, whitelisted bare names (e.g. `pi`) and calls to whitelisted
    functions are accepted. eval() then runs with empty builtins and
    _ALLOWED as the only namespace. Errors are logged and reported
    as a ⚠️ string.
    """
    try:
        node = ast.parse(expr, mode="eval")

        def ok(n: ast.AST) -> bool:
            # ast.Constant is the modern node (ast.Num is deprecated);
            # restrict it to real numbers so strings/bytes can't sneak in.
            if isinstance(n, ast.Constant):
                return isinstance(n.value, (int, float)) and not isinstance(n.value, bool)
            # Bare names let users write `pi`, `e`, `tau`, … — previously
            # rejected even though they sit in _ALLOWED.
            if isinstance(n, ast.Name):
                return n.id in _ALLOWED
            if isinstance(n, ast.UnaryOp):
                return ok(n.operand)
            if isinstance(n, ast.BinOp):
                return ok(n.left) and ok(n.right)
            if isinstance(n, ast.Call):
                return (isinstance(n.func, ast.Name)
                        and n.func.id in _ALLOWED
                        and all(ok(a) for a in n.args))
            return False

        if not ok(node.body):
            return "⛔️ Only basic maths & trig/log functions allowed."
        return str(eval(compile(node, "<expr>", "eval"),
                        {"__builtins__": {}}, _ALLOWED))
    except Exception as e:
        return log_err(e)
83
 
84
+ # ────────────────────────────────────────────────────────────────
85
+ # 3 · Wikipedia Q&A (with last-topic memory)
86
+ # ────────────────────────────────────────────────────────────────
87
# Per-session memory of the last Wikipedia page title, so a follow-up
# question using the pronoun "it" can be resolved to the previous topic.
last_topic: dict[str, str] = {}   # {session_hash: "Penicillin"}

def wiki_answer(q: str, session_id: str) -> str | None:
    """Return "**Title** — <3-sentence summary>" from Wikipedia, or None.

    If the question contains the word "it" and a topic is remembered for
    this session, the pronoun is replaced by that topic before lookup.
    Returns None on disambiguation / missing pages so the caller can fall
    back to the LLM; other errors are logged and returned as ⚠️ strings.
    """
    try:
        query = q.strip()
        topic = last_topic.get(session_id)
        if topic and re.search(r"\bit\b", query, re.I):
            # BUGFIX: pass a callable so the remembered title is inserted
            # literally — as a plain string it would be treated as a regex
            # replacement template (backslashes / \g escapes re-interpreted).
            query = re.sub(r"\bit\b", lambda _m: topic, query, flags=re.I)

        wikipedia.set_lang("en")
        page = wikipedia.page(query, auto_suggest=True, redirect=True)
        last_topic[session_id] = page.title          # remember for pronouns
        summary = wikipedia.summary(
            page.title, sentences=3, auto_suggest=False
        )
        return f"**{page.title}** — {summary}"
    except (wikipedia.DisambiguationError, wikipedia.PageError):
        return None                                  # let the LLM try instead
    except Exception as e:
        return log_err(e)
108
 
109
+ # ────────────────────────────────────────────────────────────────
110
  # 4 · ASCII art
111
+ # ────────────────────────────────────────────────────────────────
112
def ascii_art(txt: str) -> str:
    """Render *txt* as a FIGlet ASCII-art banner, 120 columns wide."""
    try:
        banner = pyfiglet.figlet_format(txt, width=120)
    except Exception as e:
        return log_err(e)
    return banner
117
 
118
+ # ────────────────────────────────────────────────────────────────
119
+ # 5 · router ↔ ChatInterface callback
120
+ # ────────────────────────────────────────────────────────────────
121
# "/math <body>" etc. — DOTALL so the body may span lines, IGNORECASE so
# "/MATH" works too. Group 1 = command, group 2 = argument text.
CMD = re.compile(r"^/(math|summarize|translate_es|ascii)\s+(.+)", re.S | re.I)

def respond(message: str,
            history: list[tuple[str, str]],
            request: "gr.Request") -> str:
    """Route one chat turn: slash commands → Wikipedia Q&A → plain LLM chat.

    *history* is the ChatInterface list of (user, bot) turn pairs; *request*
    supplies the Gradio session hash used as the per-user Wikipedia-topic key.
    """
    # Robustness: request can be None when respond() is called directly
    # (tests, API reuse) — fall back to a single shared session bucket.
    session_id = getattr(request, "session_hash", None) or "anon"

    # 5-A · Command shortcuts
    if (m := CMD.match(message.strip())):
        cmd, body = m.group(1).lower(), m.group(2).strip()
        if cmd == "math": return safe_math(body)
        if cmd == "ascii": return ascii_art(body)
        if cmd == "summarize": return llm(f"Summarise in two concise sentences:\n\n{body}\n\nSummary:")
        if cmd == "translate_es": return llm(f"Translate into Spanish (natural):\n\n{body}\n\nSpanish:")

    # 5-B · Wikipedia factual Q&A (questions of more than two words)
    if message.strip().endswith("?") and len(message.split()) > 2:
        wiki = wiki_answer(message, session_id)
        if wiki: return wiki

    # 5-C · Normal chat — keep only the last 6 turns so the prompt stays
    # well under the tokenizer's 1024-token truncation limit.
    prompt = "You are ZEN-Bot, a concise, friendly tutor for young AI pioneers.\n\n"
    for u, b in history[-6:]:
        prompt += f"User: {u}\nAssistant: {b}\n"
    prompt += f"User: {message}\nAssistant:"
    return llm(prompt)
147
 
148
+ # ────────────────────────────────────────────────────────────────
149
  # 6 · Gradio UI
150
+ # ────────────────────────────────────────────────────────────────
151
# Wire the router into a ChatInterface; the markdown description doubles
# as in-app help for the slash commands.
demo = gr.ChatInterface(
    fn=respond,
    title="🚀 ZEN-Bot Ultimate (Key-Free)",
    description=(
        "**Commands** \n"
        "• Normal chat \n"
        "• `/math 2**5 / (sin(0.5)+1)` \n"
        "• `/summarize <text>` \n"
        "• `/translate_es Hello!` \n"
        "• `/ascii ZEN` \n"
        "• Ask factual questions ending with `?` (remembers topic for pronouns)"
    ),
    theme="soft",
    fill_height=True,
    examples=[
        "Who discovered penicillin?",
        "/ascii AI ROCKS",
        "/math sqrt(144)+log(100,10)",
        "/summarize The Industrial Revolution began in Britain...",
        "/translate_es Good evening, friends!",
        "Who discovered it?",
    ],
)
174
 
175
  if __name__ == "__main__":