# run_embedding.py
# Get latent emotion vector from ROSA

import torch
from transformers import BertTokenizer

from emotion_model import Rosa

text = "The sky is golden and I feel serene."

# The 28 emotion categories the model predicts (including "neutral")
emotion_labels = [
    "admiration", "amusement", "anger", "annoyance", "approval", "caring",
    "confusion", "curiosity", "desire", "disappointment", "disapproval",
    "disgust", "embarrassment", "excitement", "fear", "gratitude", "grief",
    "joy", "love", "nervousness", "optimism", "pride", "realization",
    "relief", "remorse", "sadness", "surprise", "neutral",
]

# Load the tokenizer and the ROSA model with its latent embedding head enabled
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = Rosa(
    num_emotions=len(emotion_labels),  # match the 28 labels defined above
    latent_dim=64,
    return_vector=True,
    emotion_labels=emotion_labels,
)
model.load_state_dict(torch.load("rosa.pt", map_location=torch.device("cpu")))
model.eval()

# Tokenize the input text and run a forward pass without gradient tracking
inputs = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
with torch.no_grad():
    result = model(**inputs)

# Pull out the latent emotion embedding as a flat Python list
embedding = result["embedding"].squeeze().tolist()

print("ROSA Emotion Embedding Vector:")
print(embedding)
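
# Optional: a minimal sketch of comparing two ROSA embeddings with cosine
# similarity. It reuses the `model` and `tokenizer` objects defined above;
# the `embed_text` helper is illustrative, not part of the ROSA API.
def embed_text(t: str) -> torch.Tensor:
    """Return the ROSA latent embedding for a single string (assumed usage)."""
    batch = tokenizer(t, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():
        out = model(**batch)
    return out["embedding"].squeeze(0)

vec_a = embed_text(text)
vec_b = embed_text("Dark clouds are rolling in and I feel uneasy.")

# Cosine similarity in [-1, 1]; values near 1 suggest similar emotional content.
similarity = torch.nn.functional.cosine_similarity(vec_a, vec_b, dim=0)
print(f"Cosine similarity between the two embeddings: {similarity.item():.3f}")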