File size: 1,512 Bytes
c1d949e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
# ROSA: Recursive Ontology of Semantic Affect
# Sublime Emotional System by Willinton Triana Cardona

import torch
from transformers import BertTokenizer
from model.emotion_model import Rosa

# Emotion labels (GoEmotions taxonomy: 27 emotions + "neutral" = 28 total).
# Order matters: index i here must match output unit i of the trained model
# (see num_emotions=28 passed to Rosa below).
emotion_labels = [
    "admiration", "amusement", "anger", "annoyance", "approval", "caring", "confusion", "curiosity",
    "desire", "disappointment", "disapproval", "disgust", "embarrassment", "excitement", "fear",
    "gratitude", "grief", "joy", "love", "nervousness", "optimism", "pride", "realization", "relief",
    "remorse", "sadness", "surprise", "neutral"
]

# Load tokenizer and model.
# Module-level side effects: fetches the BERT tokenizer (network/cache) and
# reads the trained weights from "rosa.pt" in the current working directory.
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")
model = Rosa(model_name="bert-base-uncased", num_emotions=28, emotion_labels=emotion_labels)
# NOTE(review): torch.load uses pickle under the hood — only load checkpoints
# from a trusted source; consider weights_only=True if the file is a plain
# state dict (TODO confirm checkpoint contents).
model.load_state_dict(torch.load("rosa.pt", map_location=torch.device("cpu")))
# Inference mode: disables dropout / switches batch-norm style layers to eval.
model.eval()

# Inference function
def predict(text: str):
    """Score *text* against every emotion label.

    Tokenizes the input with the module-level BERT tokenizer, runs the
    loaded Rosa model without gradient tracking, and applies a sigmoid to
    the logits (multi-label setup — probabilities are independent, not a
    softmax distribution).

    Returns:
        A list of ``(label, probability)`` tuples, one per entry in
        ``emotion_labels``, in label order.
    """
    encoded = tokenizer(text, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():
        out = model(
            input_ids=encoded["input_ids"],
            attention_mask=encoded["attention_mask"],
        )
        # Drop the batch dimension: a single input yields logits of shape (1, 28).
        scores = torch.sigmoid(out["logits"]).squeeze()
    return [(label, p) for label, p in zip(emotion_labels, scores.tolist())]

# CLI or API test
if __name__ == "__main__":
    # Quick smoke test: run one sentence through the model and print
    # every label's probability.
    sample = "My heart is filled with longing and beauty."

    print("\n🌹 Rosa's Emotional Reading:\n")
    for emotion, score in predict(sample):
        print(f"  → {emotion}: {score:.4f}")