# run_embedding.py
# Produce a latent emotion embedding vector for a piece of text using the
# ROSA model (BERT tokenizer front end + project-local `Rosa` network).
import torch
from transformers import BertTokenizer
from emotion_model import Rosa

# The 28 GoEmotions categories (27 emotions + "neutral") that the model scores.
emotion_labels = [
    "admiration", "amusement", "anger", "annoyance", "approval", "caring",
    "confusion", "curiosity", "desire", "disappointment", "disapproval",
    "disgust", "embarrassment", "excitement", "fear", "gratitude", "grief",
    "joy", "love", "nervousness", "optimism", "pride", "realization", "relief",
    "remorse", "sadness", "surprise", "neutral",
]


def main() -> None:
    """Tokenize a sample sentence, run ROSA, and print its latent embedding."""
    text = "The sky is golden and I feel serene."

    tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")

    # Fix: the original hard-coded num_emotions=29, but the label list above
    # contains exactly 28 entries. Derive the head size from the labels so the
    # two can never drift apart.
    # NOTE(review): if `rosa.pt` was actually trained with a 29-way head,
    # load_state_dict below will raise a size-mismatch error — confirm the
    # checkpoint's output dimension.
    model = Rosa(
        num_emotions=len(emotion_labels),
        latent_dim=64,
        return_vector=True,
        emotion_labels=emotion_labels,
    )
    model.load_state_dict(torch.load("rosa.pt", map_location=torch.device("cpu")))
    model.eval()  # inference mode: disables dropout / batch-norm updates

    inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():  # forward pass only; no gradients needed
        result = model(**inputs)

    # result["embedding"] is presumably a (1, latent_dim) tensor — TODO confirm
    # against Rosa.forward; squeeze() drops the batch dimension for printing.
    embedding = result["embedding"].squeeze().tolist()
    print("ROSA Emotion Embedding Vector:")
    print(embedding)


if __name__ == "__main__":
    main()