# ChatbotTF / chatbot.py
# Uploaded to Hugging Face by M1NJ1 ("Upload 3 files", commit eadeab6, verified).
import torch
import numpy as np
from transformers import AutoModelForSequenceClassification, AutoTokenizer
# ๋ชจ๋ธ ๋กœ๋”ฉ
model = AutoModelForSequenceClassification.from_pretrained("M1NJ1/klue-bert-emotion")
tokenizer = AutoTokenizer.from_pretrained("M1NJ1/klue-bert-emotion")
model.eval().to("cuda" if torch.cuda.is_available() else "cpu")
label_map = list(range(44)) # ์‹ค์ œ ๊ฐ์ • ๋ผ๋ฒจ ์ด๋ฆ„์œผ๋กœ ์ˆ˜์ • ๊ฐ€๋Šฅ
def predict_multi(text):
    """Multi-label emotion prediction for a single input text.

    Tokenizes *text*, runs the classifier, applies an element-wise
    sigmoid to the logits (independent per-label probabilities, not a
    softmax), and returns every label whose probability exceeds 0.5.

    Args:
        text: input sentence (str).

    Returns:
        list[str]: detected emotion tags, formatted "๊ฐ์ •{i}" for label
        index i; empty list when no label clears the threshold.
    """
    device = "cuda" if torch.cuda.is_available() else "cpu"
    inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True, max_length=64).to(device)
    with torch.no_grad():
        outputs = model(**inputs)
    probs = torch.sigmoid(outputs.logits).squeeze().cpu().numpy()
    # atleast_1d guards the single-label case: squeeze() would collapse a
    # (1, 1) logit tensor to a 0-d array, and enumerate() over a 0-d
    # numpy array raises TypeError.
    return [f"๊ฐ์ •{i}" for i, p in enumerate(np.atleast_1d(probs)) if p > 0.5]
def generate_response(emotions, mode):
    """Compose a reply sentence from detected emotion tags.

    Args:
        emotions: list of emotion tags as produced by predict_multi
            (e.g. '๊ฐ์ •1'); an empty list yields a clarification prompt.
        mode: 'T' for a practical/analytic tone, 'F' for an empathetic
            tone; any other value falls through to the generic reply.

    Returns:
        str: the assembled response, or a generic fallback sentence when
        no tag-specific text matched.
    """
    if not emotions:
        return "๊ฐ์ •์„ ํŒŒ์•…ํ•˜์ง€ ๋ชปํ–ˆ์–ด์š”. ๋‹ค์‹œ ๋งํ•ด์ค„ ์ˆ˜ ์žˆ๋‚˜์š”?"
    response = ""
    # The source had these literals broken across physical lines (a paste
    # artifact that is a syntax error); they are reconstructed with an
    # explicit trailing "\n", content unchanged.
    # NOTE(review): the Korean text appears mojibake-encoded (UTF-8 text
    # re-decoded in another codepage); it is preserved byte-for-byte so
    # tag matching stays consistent with predict_multi's labels.
    if mode == 'T':
        if '๊ฐ์ •1' in emotions:
            response += "๋ถˆ์•ˆ์€ ์ค€๋น„ ๋ถ€์กฑ์—์„œ ์˜ฌ ์ˆ˜ ์žˆ์–ด์š”. ์ฐจ๊ทผํžˆ ์ •๋ฆฌํ•ด๋ณผ๊นŒ์š”?\n"
    elif mode == 'F':
        if '๊ฐ์ •1' in emotions:
            response += "๋ถˆ์•ˆํ•œ ํ•˜๋ฃจ์˜€๊ฒ ๊ตฐ์š”... ๊ดœ์ฐฎ์•„์š”, ๋‹น์‹  ์ž˜ํ•˜๊ณ  ์žˆ์–ด์š”.\n"
    return response or "๋‹น์‹ ์˜ ๊ฐ์ •์— ๊ท€ ๊ธฐ์šธ์ด๊ณ  ์žˆ์–ด์š”."