BikoRiko committed on
Commit
ed8d032
·
verified ·
1 Parent(s): f6e0598

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -0
app.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from transformers import pipeline, AutoTokenizer, AutoModelForCausalLM
2
+ import gradio as gr
3
+
4
# Use DialoGPT-small: a compact dialogue model that is practical to run on CPU.
model_name = "microsoft/DialoGPT-small"

# Pull the tokenizer and model weights from the Hugging Face hub.
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Wrap both in a text-generation pipeline; device=-1 pins inference to the CPU.
chatbot = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    device=-1,
)
10
+
11
def respond(message, history):
    """Generate one chat reply from DialoGPT given the conversation so far.

    Parameters:
        message: the user's latest utterance (str).
        history: prior turns from gradio — either (user, bot) pairs or
            role/content dicts, depending on the gradio version.

    Returns:
        The model's reply as a plain string.
    """
    # Rebuild the dialogue transcript in the "User: .../Bot: ..." format
    # DialoGPT-style prompting expects. Accept both gradio history formats.
    turns = []
    for h in history:
        if isinstance(h, dict):
            # "messages" format: {"role": "user"|"assistant", "content": ...}
            role = "User" if h.get("role") == "user" else "Bot"
            turns.append(f"{role}: {h.get('content', '')}")
        else:
            # "tuples" format: (user_message, bot_message)
            turns.append(f"User: {h[0]}\nBot: {h[1]}")
    turns.append(f"User: {message}\nBot:")
    input_text = "\n".join(turns)

    # max_new_tokens bounds only the reply; max_length would count the prompt
    # too, so long conversations could get zero new tokens.
    response = chatbot(
        input_text,
        max_new_tokens=256,
        pad_token_id=tokenizer.eos_token_id,
    )
    generated = response[0]["generated_text"]

    # The pipeline echoes the prompt, so strip it off at the exact boundary
    # rather than splitting on "Bot:" (the continuation may itself contain
    # fabricated "Bot:" turns, which would make split()[-1] grab the wrong one).
    if generated.startswith(input_text):
        continuation = generated[len(input_text):]
    else:
        continuation = generated.split("Bot:")[-1]

    # DialoGPT often keeps writing the next "User:" turn — cut the reply there.
    return continuation.split("User:")[0].strip()
19
+
20
# Wire the responder into gradio's ready-made chat UI and start the app.
demo = gr.ChatInterface(
    fn=respond,
    title="My Chatbot",
    description="A simple CPU-friendly chatbot using DialoGPT-small.",
    examples=["Hello!", "How are you?", "Tell me a joke."],
)
demo.launch()