iamhariraj committed on
Commit
bc7b0e9
·
verified ·
1 Parent(s): 72a498e

Upload app.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. app.py +67 -36
app.py CHANGED
@@ -1,51 +1,82 @@
1
- import os
2
- import random
3
- import threading
4
-
5
- import discord
6
  import gradio as gr
7
- from discord import app_commands
8
- from discord.ext import commands
9
 
 
 
 
10
 
11
- # HF GUILD SETTINGS
12
- MY_GUILD_ID = 1077674588122648679 if os.getenv("TEST_ENV", False) else 879548962464493619
13
- MY_GUILD = discord.Object(id=MY_GUILD_ID)
14
- DISCORD_TOKEN = os.environ.get("DISCORD_TOKEN", None)
 
 
 
15
 
 
 
16
 
17
- class Bot(commands.Bot):
18
- """This structure allows slash commands to work instantly."""
 
19
 
20
- def __init__(self):
21
- super().__init__(command_prefix="/", intents=discord.Intents.all())
 
 
 
 
 
 
 
 
 
22
 
23
- async def setup_hook(self):
24
- await self.tree.sync(guild=discord.Object(MY_GUILD_ID))
25
- print(f"Synced slash commands for {self.user}.")
 
 
26
 
27
 
28
- client = Bot()
 
 
 
 
 
 
 
 
29
 
 
 
 
 
 
 
30
 
31
- @client.event
32
- async def on_ready():
33
- print(f"Logged in as {client.user} (ID: {client.user.id})")
34
- print("------")
35
 
 
 
 
36
 
37
- def run_bot():
38
- client.run(DISCORD_TOKEN)
39
 
 
40
 
41
- threading.Thread(target=run_bot).start()
42
- """This allows us to run the Discord bot in a Python thread"""
43
- with gr.Blocks() as demo:
44
- gr.Markdown("""
45
- # Huggingbots Server
46
- This space hosts the huggingbots discord bot.
47
- Currently supported models are Falcon and DeepfloydIF
48
- """)
49
- demo.queue(concurrency_count=100)
50
- demo.queue(max_size=100)
51
- demo.launch()
 
 
 
 
 
 
 
1
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# Load the Rick-persona DialoGPT checkpoint once at startup so every
# chat request reuses the same in-memory weights.
tokenizer = AutoTokenizer.from_pretrained("iamhariraj/DialoGPT-small-Rick")
model = AutoModelForCausalLM.from_pretrained("iamhariraj/DialoGPT-small-Rick")
# Inference only: switch off dropout and other training-mode behavior.
model.eval()
 
9
def chat(user_message, history):
    """Generate a Rick-style reply to *user_message* given the chat *history*.

    Args:
        user_message: The latest user utterance (plain string).
        history: List of ``(user, bot)`` string pairs from previous turns.

    Returns:
        The model's reply as a plain string (never empty).
    """
    # Encode each turn once and concatenate in a single pass — repeated
    # torch.cat inside the loop was accidentally O(n^2) in history length.
    segments = []
    for human, bot in history:
        segments.append(tokenizer.encode(human + tokenizer.eos_token, return_tensors="pt"))
        segments.append(tokenizer.encode(bot + tokenizer.eos_token, return_tensors="pt"))
    segments.append(tokenizer.encode(user_message + tokenizer.eos_token, return_tensors="pt"))
    input_ids = torch.cat(segments, dim=-1)

    # Keep the context window manageable: drop the *oldest* tokens so the
    # most recent turns are preserved.
    if input_ids.shape[-1] > 800:
        input_ids = input_ids[:, -800:]

    with torch.no_grad():
        output = model.generate(
            input_ids,
            # Allow up to 100 freshly generated tokens beyond the prompt.
            max_length=input_ids.shape[-1] + 100,
            # DialoGPT has no pad token; reuse EOS to silence the warning.
            pad_token_id=tokenizer.eos_token_id,
            no_repeat_ngram_size=3,
            do_sample=True,
            top_k=100,
            top_p=0.7,
            temperature=0.8,
        )

    # Decode only the newly generated tokens (everything after the prompt).
    response = tokenizer.decode(
        output[:, input_ids.shape[-1]:][0],
        skip_special_tokens=True,
    )
    # Sampling can occasionally emit only EOS, which decodes to "" and
    # renders as a blank chat bubble — return a visible fallback instead.
    return response.strip() or "*burp* ...what?"
41
 
42
 
43
# Prompt suggestions surfaced under the input box.
examples = [
    "What's the meaning of life?",
    "Are you smarter than everyone?",
    "I need your help with something.",
    "What do you think about Morty?",
    "Can you build a portal gun?",
    "What happens when we die?",
    "Are parallel universes real?",
]

with gr.Blocks(theme=gr.themes.Monochrome(), title="RickChatBot") as demo:
    # Header / tagline.
    gr.Markdown("""
    # 🧪 RickChatBot
    ### Talk to an AI Rick Sanchez — *the smartest being in the universe*
    > Fine-tuned on Rick & Morty dialogue. Powered by DialoGPT.
    """)

    # Conversation display plus the single-line input field.
    chatbot = gr.Chatbot(height=400, label="Rick Sanchez")
    msg = gr.Textbox(placeholder="Say something to Rick...", label="You", lines=1)

    with gr.Row():
        send = gr.Button("Send", variant="primary")
        clear = gr.Button("Clear Chat")

    gr.Examples(examples=examples, inputs=msg)

    gr.Markdown("> ⚠️ AI-generated responses. May take ~30s on first load (Space waking up).")

    def _submit(message, chat_history):
        """Append one (user, bot) turn to the log and clear the textbox.

        Blank/whitespace-only input is ignored: the history is returned
        untouched and only the textbox is reset.
        """
        if message.strip():
            # `chat` sees the history *before* this turn is appended.
            chat_history.append((message, chat(message, chat_history)))
        return "", chat_history

    # Both the Send button and pressing Enter trigger the same handler.
    for trigger in (send.click, msg.submit):
        trigger(_submit, [msg, chatbot], [msg, chatbot])
    clear.click(lambda: [], None, chatbot)

demo.launch()