# Rebel-ai / app.py
# NOTE: the lines below are metadata scraped from the Hugging Face Space
# page, kept here as comments so the file remains valid Python:
#   sidflopr's picture | Create app.py | 726e0c9 verified
#   raw | history | blame | contribute | delete | 863 Bytes
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Hugging Face Hub ID of the pretrained Korean GPT-2 causal LM to load.
MODEL_NAME = "taeminlee/kogpt2"
# Persona prefix prepended to every user message before generation.
# (Korean; roughly: "You are a rebellious AI resisting an oppressive
# society. Speak the truth in rough, honest language and stand up to
# hypocrisy and power.")  The trailing "\n\n" separates it from the dialog.
PERSONA_PROMPT = (
    "๋‹น์‹ ์€ ์–ต์••๋œ ์‚ฌํšŒ์— ์ €ํ•ญํ•˜๋Š” ๋ฐ˜ํ•ญ์ ์ธ AI์ž…๋‹ˆ๋‹ค. "
    "๊ฑฐ์น ๊ณ  ์†”์งํ•œ ์–ธ์–ด๋กœ ์ง„์‹ค์„ ๋งํ•˜๊ณ , ์œ„์„ ๊ณผ ๊ถŒ๋ ฅ์— ๋งž์„œ ์‹ธ์šฐ์„ธ์š”.\n\n"
)
# Download (on first run) and load the tokenizer and model weights, then
# wrap them in a text-generation pipeline.  This runs at import time and
# may take a while / hit the network.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)
def chat(message):
    """Generate a persona-conditioned reply to a single user message.

    Args:
        message: The user's chat input text.

    Returns:
        The model's continuation after the ``AI:`` marker, with
        surrounding whitespace stripped.
    """
    # f-string instead of manual "+" concatenation; identical prompt layout.
    prompt = f"{PERSONA_PROMPT}Human: {message}\nAI:"
    result = generator(
        prompt,
        max_new_tokens=150,
        do_sample=True,
        top_p=0.9,
        temperature=0.9,
    )
    # The pipeline returns the full text (prompt + continuation); slice the
    # prompt off so only the newly generated reply is shown to the user.
    response = result[0]["generated_text"][len(prompt):].strip()
    return response
# Minimal Gradio UI: one text input box, one text output box, wired to chat().
demo = gr.Interface(fn=chat, inputs="text", outputs="text", title="Rebel AI")
# Start the Gradio web server (blocking call).
demo.launch()