# RickChatBot / app.py
# Author: iamhariraj
# Switch to InferenceClient — remove torch/transformers local loading
# (commit c211970, verified)
import random
import gradio as gr
from huggingface_hub import InferenceClient
# HF Hub model fine-tuned on Rick & Morty dialogue (DialoGPT-medium base).
MODEL_ID = "iamhariraj/DialoGPT-medium-Rick"
# Serverless Inference API client — no local model weights are loaded.
client = InferenceClient(model=MODEL_ID)
# Persona seeds: few-shot (user, bot) exchange pairs prepended to every
# prompt so the model stays in character.  They cover Rick's 5 personality
# pillars: identity/genius, nihilism, science, Morty relationship, multiverse.
PERSONA_SEED = [
    (
        "Who are you?",
        "I'm Rick Sanchez, the smartest man in the universe — any universe. "
        "I've seen things that would make your brain leak out of your ears, Morty. "
        "Now stop asking stupid questions.",
    ),
    (
        "What's the point of anything?",
        "There is no point. The universe is basically an empty void of chaos and "
        "entropy. The sooner you accept that, the sooner you can get back to drinking. "
        "It's called being *smart*, Morty.",
    ),
    (
        "Can science explain everything?",
        "Science doesn't explain everything — it *is* everything. Religion, feelings, "
        "love — those are just chemical reactions your tiny brain invented to cope with "
        "how meaningless existence is. Science is the only honest answer.",
    ),
    (
        "What do you think about Morty?",
        "Morty's my grandson and the perfect sidekick — his average IQ balances out "
        "my genius and creates a perfect wave that lets me go undetected on most planets. "
        "Also, I guess I… don't hate him. Don't tell him I said that.",
    ),
    (
        "Are parallel universes real?",
        "Are parallel— *burp* — are you kidding me? I've been to infinite parallel "
        "universes before breakfast. There's one where you're a pizza, Morty. "
        "A *pizza*. Parallel universes aren't just real, they're exhausting.",
    ),
]
# Canned in-character replies, used when the Inference API call fails or
# every sampled generation is too short to be worth showing.
FALLBACK_RESPONSES = [
    "*burp* ...I don't have time for this.",
    "That's the dumbest thing I've heard since Morty asked me what clouds taste like.",
    "Look, I'm a genius and even I can't make sense of what you just said.",
    "Science has no answer for that level of stupidity.",
    "Wubba lubba dub dub — which is just my way of saying I've got better things to do.",
]
def build_prompt(user_message, history):
    """Build a flat DialoGPT-style prompt from persona seeds + history + new message.

    Each conversational turn is encoded as ``question<EOS>answer<EOS>`` and the
    new user message is appended as ``message<EOS>`` for the model to complete.

    Args:
        user_message: the latest user utterance.
        history: list of (user, bot) string pairs from earlier turns.

    Returns:
        The concatenated prompt string, capped at roughly 3600 characters
        (~900 tokens at a rough 4 chars/token estimate).
    """
    EOS = "<|endoftext|>"
    MAX_CHARS = 3600  # ~900 tokens worth of characters (rough: 4 chars/token)
    turns = [human + EOS + bot + EOS for human, bot in PERSONA_SEED]
    turns += [human + EOS + bot + EOS for human, bot in history]
    turns.append(user_message + EOS)
    # Trim by dropping whole turns from the oldest end rather than slicing
    # characters off the front: a raw tail slice could cut through the middle
    # of an EOS marker (or mid-turn) and corrupt the separator structure.
    # Always keep at least the newest turn.
    while len(turns) > 1 and sum(map(len, turns)) > MAX_CHARS:
        turns.pop(0)
    prompt = "".join(turns)
    if len(prompt) > MAX_CHARS:
        # Single oversized remaining turn — hard character cut as a last resort.
        prompt = prompt[-MAX_CHARS:]
    return prompt
def chat(user_message, history):
    """Generate a Rick-style reply for ``user_message``.

    Builds a persona-seeded DialoGPT prompt from ``history`` (a list of
    (user, bot) pairs) and queries the HF Inference API.  Retries with
    progressively higher temperatures when the model returns a reply that is
    too short; falls back to a canned line if the API errors out or every
    attempt is too short.

    Returns:
        The generated reply, ``""`` for blank input, or a random fallback.
    """
    if not user_message.strip():
        return ""
    prompt = build_prompt(user_message, history)
    EOS = "<|endoftext|>"
    # Escalate temperature on each retry to shake loose a longer reply.
    for temperature in (0.95, 1.05, 1.15):
        try:
            result = client.text_generation(
                prompt,
                max_new_tokens=120,
                temperature=temperature,
                repetition_penalty=1.3,
                do_sample=True,
                top_k=80,
                top_p=0.85,
                stop_sequences=[EOS],
            )
        except Exception as e:
            # If the API call itself fails, don't hammer it with the remaining
            # retries — fall straight through to a canned fallback.
            print(f"InferenceClient error: {e}")
            break
        # Some backends echo the stop sequence inside the generated text, so a
        # bare strip() could pass the length check while the visible reply is
        # still polluted — remove the marker before measuring.
        response = result.replace(EOS, "").strip()
        if len(response) >= 12:  # reject empty / one-word generations
            return response
    return random.choice(FALLBACK_RESPONSES)
# Starter prompts surfaced in the UI via gr.Examples (click-to-fill the textbox).
examples = [
    "What's the meaning of life?",
    "Are you smarter than everyone?",
    "I need your help with something.",
    "What do you think about Morty?",
    "Can you build a portal gun?",
    "What happens when we die?",
    "Are parallel universes real?",
    "Do you believe in God?",
    "What's the deal with the Citadel of Ricks?",
    "Why do you drink so much?",
]
# --- Gradio UI -------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Monochrome(), title="RickChatBot") as demo:
    # Header blurb.
    gr.Markdown("""
# 🧪 RickChatBot
### Talk to an AI Rick Sanchez — *the smartest being in the universe*
> Fine-tuned on real Rick & Morty S1–S3 dialogue. Powered by DialoGPT-medium.
""")

    # Chat widgets: transcript, input box, and the send/clear controls.
    chatbot = gr.Chatbot(height=420, label="Rick Sanchez")
    msg = gr.Textbox(placeholder="Say something to Rick...", label="You", lines=1)
    with gr.Row():
        send = gr.Button("Send", variant="primary")
        clear = gr.Button("Clear Chat")

    gr.Examples(examples=examples, inputs=msg)
    gr.Markdown("> ⚠️ AI-generated responses. Uses HuggingFace Inference API — no local model loading.")

    def handle_message(text, log):
        """Append (user, bot) to the transcript and clear the input box.

        Blank/whitespace input is a no-op: the transcript is returned
        unchanged and nothing is sent to the model.
        """
        if not text.strip():
            return "", log
        # `chat` sees the transcript *before* this turn is appended.
        reply = chat(text, log)
        log.append((text, reply))
        return "", log

    # Both the Send button and pressing Enter submit the message.
    for trigger in (send.click, msg.submit):
        trigger(handle_message, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: [], None, chatbot)

demo.launch()