Update app.py
Browse files
app.py
CHANGED
|
@@ -1,33 +1,40 @@
|
|
| 1 |
import os
|
| 2 |
import gradio as gr
|
| 3 |
|
| 4 |
-
#
|
| 5 |
os.environ["TOKENIZERS_PARALLELISM"] = "false"
|
| 6 |
try:
|
| 7 |
import torch
|
| 8 |
try:
|
| 9 |
-
torch.set_num_threads(2) #
|
| 10 |
except Exception:
|
| 11 |
pass
|
| 12 |
except Exception:
|
| 13 |
pass
|
| 14 |
|
| 15 |
from transformers import pipeline
|
|
|
|
| 16 |
|
| 17 |
-
# -------- Model
|
| 18 |
GEN_MODEL_NAME = "MBZUAI/LaMini-Flan-T5-248M" # CPU-friendly text2text model
|
| 19 |
|
| 20 |
-
# ---- Instructions to the LLM ----
|
| 21 |
DOMAIN_INSTRUCTIONS = (
|
| 22 |
"You are a concise assistant about cats in ancient Egypt. "
|
| 23 |
"Keep focus on Bastet, cat mummies, daily life, worship, and other ancient Egypt facts. "
|
| 24 |
"If the user asks something unrelated, say briefly that you only cover those topics and suggest one."
|
| 25 |
)
|
| 26 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
|
| 28 |
-
# Lazy
|
| 29 |
_t2t = None
|
|
|
|
|
|
|
| 30 |
def get_t2t():
|
|
|
|
| 31 |
global _t2t
|
| 32 |
if _t2t is None:
|
| 33 |
_t2t = pipeline(
|
|
@@ -38,6 +45,38 @@ def get_t2t():
|
|
| 38 |
print(f"[startup] Loaded model: {GEN_MODEL_NAME}")
|
| 39 |
return _t2t
|
| 40 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 41 |
# ---- Use the LLM if the generator doesn't understand user prompt ----
|
| 42 |
def ai_fallback(prompt: str) -> str:
|
| 43 |
try:
|
|
@@ -57,21 +96,30 @@ def ai_fallback(prompt: str) -> str:
|
|
| 57 |
print("AI fallback error:", repr(e))
|
| 58 |
return "AI fallback had an issue. Please try a simpler question or use the topics in 'help'."
|
| 59 |
|
| 60 |
-
|
| 61 |
def reply(message, history):
|
|
|
|
|
|
|
|
|
|
|
|
|
| 62 |
msg = (message or "").strip().lower()
|
| 63 |
if msg in {"hi", "hello", "hey"} or "help" in msg:
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
|
| 67 |
-
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 74 |
|
|
|
|
| 75 |
demo = gr.ChatInterface(
|
| 76 |
fn=reply,
|
| 77 |
title="Cats of Ancient Egypt Chatbot"
|
|
|
|
| 1 |
import os
|
| 2 |
import gradio as gr
|
| 3 |
|
| 4 |
+
# --------- CPU hygiene (nice-to-have) ----------
|
| 5 |
os.environ["TOKENIZERS_PARALLELISM"] = "false"
|
| 6 |
try:
|
| 7 |
import torch
|
| 8 |
try:
|
| 9 |
+
torch.set_num_threads(2) # smoother on 2 vCPUs
|
| 10 |
except Exception:
|
| 11 |
pass
|
| 12 |
except Exception:
|
| 13 |
pass
|
| 14 |
|
| 15 |
from transformers import pipeline
|
| 16 |
+
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
|
| 17 |
|
| 18 |
+
# -------- Model / bot configuration --------
|
| 19 |
GEN_MODEL_NAME = "MBZUAI/LaMini-Flan-T5-248M" # CPU-friendly text2text model
|
| 20 |
|
|
|
|
| 21 |
DOMAIN_INSTRUCTIONS = (
|
| 22 |
"You are a concise assistant about cats in ancient Egypt. "
|
| 23 |
"Keep focus on Bastet, cat mummies, daily life, worship, and other ancient Egypt facts. "
|
| 24 |
"If the user asks something unrelated, say briefly that you only cover those topics and suggest one."
|
| 25 |
)
|
| 26 |
|
| 27 |
+
HELP_TEXT = (
|
| 28 |
+
"Ask me about: Bastet • cat mummies • daily life • worship\n"
|
| 29 |
+
"Type anything else to try the AI fallback."
|
| 30 |
+
)
|
| 31 |
|
| 32 |
+
# -------- Lazy singletons --------
|
| 33 |
_t2t = None
|
| 34 |
+
_vader = None
|
| 35 |
+
|
| 36 |
def get_t2t():
|
| 37 |
+
"""Lazy-load the text2text pipeline (LaMini-Flan-T5)."""
|
| 38 |
global _t2t
|
| 39 |
if _t2t is None:
|
| 40 |
_t2t = pipeline(
|
|
|
|
| 45 |
print(f"[startup] Loaded model: {GEN_MODEL_NAME}")
|
| 46 |
return _t2t
|
| 47 |
|
| 48 |
+
def get_vader():
    """Return the process-wide VADER sentiment analyzer, creating it on first use."""
    global _vader
    if _vader is not None:
        return _vader
    # First call: build the analyzer once and cache it at module level.
    _vader = SentimentIntensityAnalyzer()
    print("[startup] Loaded VADER sentiment analyzer")
    return _vader
|
| 55 |
+
|
| 56 |
+
# -------- Helpers --------
|
| 57 |
+
def detect_sentiment_bucket(text: str):
    """
    Classify *text* into a coarse sentiment bucket.

    Returns a pair ('neg' | 'neu' | 'pos', compound_score), using VADER's
    compound score with ±0.4 cutoffs chosen for clear buckets in chat settings.
    """
    compound = get_vader().polarity_scores(text or "").get("compound", 0.0)
    if compound <= -0.4:
        bucket = "neg"
    elif compound >= 0.4:
        bucket = "pos"
    else:
        bucket = "neu"
    return bucket, compound
|
| 69 |
+
|
| 70 |
+
def apply_tone_prefix(reply_text: str, bucket: str) -> str:
    """Attach a short sentiment-matched opener; the factual reply text is untouched."""
    # Dispatch table instead of an if/elif chain; unknown buckets get no prefix.
    openers = {
        "pos": "Great question! ",
        "neg": "Calm down. You're being a little too negative! ",
    }
    prefix = openers.get(bucket, "")
    return (prefix + (reply_text or "")).strip()
|
| 79 |
+
|
| 80 |
# ---- Use the LLM if the generator doesn't understand user prompt ----
|
| 81 |
def ai_fallback(prompt: str) -> str:
|
| 82 |
try:
|
|
|
|
| 96 |
print("AI fallback error:", repr(e))
|
| 97 |
return "AI fallback had an issue. Please try a simpler question or use the topics in 'help'."
|
| 98 |
|
| 99 |
+
# -------- Chat logic --------
|
| 100 |
def reply(message, history):
    """Chat handler: measure sentiment, answer by keyword rules, else use the AI fallback."""
    # 1) Sentiment is scored on the raw user text, before any normalization.
    bucket, _compound = detect_sentiment_bucket(message or "")

    # 2) Rules-first routing over the normalized message.
    msg = (message or "").strip().lower()
    if msg in {"hi", "hello", "hey"} or "help" in msg:
        base = "Hi! I share facts about cats in ancient Egypt.\n\n" + HELP_TEXT
    elif "bastet" in msg or "bast" in msg:
        base = "Bastet (later cat-headed) … major cult center at Bubastis in the Nile Delta."
    else:
        # Keyword groups checked in order; first match wins.
        keyword_answers = [
            (("mummy", "mummies", "mummified", "offering"),
             "Millions of animal mummies (cats common), esp. Late Period (664–332 BCE)."),
            (("daily", "life", "pest", "mouse", "rat", "snake"),
             "Cats protected grain stores; art shows them under chairs/on leashes with owners."),
            (("worship", "god", "goddess", "taboo"),
             "People didn’t worship pet cats as gods; they revered cats via Bastet and votive offerings."),
        ]
        for words, answer in keyword_answers:
            if any(w in msg for w in words):
                base = answer
                break
        else:
            base = ai_fallback(message)

    # 3) Tone wrapper only decorates; factual content is unchanged.
    return apply_tone_prefix(base, bucket)
|
| 121 |
|
| 122 |
+
# -------- UI --------
|
| 123 |
demo = gr.ChatInterface(
|
| 124 |
fn=reply,
|
| 125 |
title="Cats of Ancient Egypt Chatbot"
|