File size: 3,233 Bytes
bed30b5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
import gradio as gr
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch
import numpy as np

# Load model and tokenizer
# Fine-tuned BERT checkpoint pulled from the Hugging Face Hub; downloaded
# (and cached) at import time, so the first launch may take a while.
model_name = "Aakash22134/bert-twitter-sentiment-classifier"

print("Loading model...")
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
print("Model loaded successfully!")

# Emotion labels
# Maps the model's output class index to a display label (with emoji).
# Order must match the label order the checkpoint was trained with.
id2label = {
    0: "Sadness 😢",
    1: "Joy 😊", 
    2: "Love ❤️",
    3: "Anger 😠",
    4: "Fear 😨",
    5: "Surprise 😲"
}

def predict_emotion(text):
    """
    Predict the emotion distribution for a piece of text.

    Args:
        text: Input text to classify. May be None or empty/whitespace-only,
            in which case all-zero scores are returned instead of running
            the model.

    Returns:
        dict mapping each emotion label (with emoji) to its probability,
        suitable for display in a ``gr.Label`` component.
    """
    # Guard against None first: Gradio can pass None for a cleared textbox,
    # and calling .strip() on None would raise AttributeError.
    if not text or not text.strip():
        return {label: 0.0 for label in id2label.values()}

    # Tokenize; truncate to BERT's 512-token context limit.
    inputs = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        max_length=512,
        padding=True,
    )

    # Inference only — disable gradient tracking to save memory/time.
    with torch.no_grad():
        outputs = model(**inputs)
        # Convert raw logits into a probability distribution over classes.
        predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)
        probabilities = predictions[0].numpy()

    # Map each class index to its human-readable label with a float score.
    return {id2label[i]: float(probabilities[i]) for i in range(len(probabilities))}

# Example texts
# One sample per emotion class, shown as clickable examples in the UI.
# Each entry is a single-element list because gr.Interface has one input.
examples = [
    ["I just got promoted at work! This is the best day ever!"],          # joy
    ["I can't believe they cancelled my favorite show. I'm so upset right now."],  # sadness
    ["Missing you so much. Can't wait to see you again."],                # love
    ["Why does everything always go wrong for me? This is so frustrating!"],  # anger
    ["I'm really worried about the test tomorrow. What if I fail?"],      # fear
    ["Oh wow! I never expected this to happen!"]                          # surprise
]

# Create Gradio interface
# Single-textbox in, label widget out; `article` renders as markdown below
# the interface. The metrics table values are taken from the model card —
# NOTE(review): not verifiable from this file, confirm against the checkpoint.
demo = gr.Interface(
    fn=predict_emotion,
    inputs=gr.Textbox(
        label="Enter your text",
        placeholder="Type something here...",
        lines=3
    ),
    # Show all 6 classes so the full probability distribution is visible.
    outputs=gr.Label(label="Emotion Prediction", num_top_classes=6),
    title="🎭 Twitter Sentiment Classifier",
    description="""
    This AI model analyzes text and predicts the underlying emotion.
    It can detect 6 different emotions: **Sadness, Joy, Love, Anger, Fear, and Surprise**.
    
    **How to use:** Simply type or paste any text and the model will predict the emotion!
    """,
    examples=examples,
    theme=gr.themes.Soft(),
    article="""
    ### About This Model
    - **Base Model**: BERT (bert-base-uncased)
    - **Task**: Multi-class emotion classification
    - **Test Accuracy**: 90.06%
    - **Training Data**: 16,000 emotion-labeled tweets
    - **Classes**: Sadness, Joy, Love, Anger, Fear, Surprise
    
    ### Performance Metrics
    | Emotion | Precision | Recall | F1-Score |
    |---------|-----------|--------|----------|
    | Sadness | 0.93 | 0.95 | 0.94 |
    | Joy | 0.92 | 0.91 | 0.92 |
    | Love | 0.76 | 0.75 | 0.76 |
    | Anger | 0.91 | 0.91 | 0.91 |
    | Fear | 0.89 | 0.88 | 0.88 |
    | Surprise | 0.75 | 0.72 | 0.74 |
    
    ### Limitations
    - Works best with English text
    - Trained on Twitter/social media style text
    - May not perform as well on formal or technical text
    - Shorter texts (like tweets) work better than very long texts
    """
)

# Launch the web UI only when run as a script (not when imported,
# e.g. by a Hugging Face Space runner that calls launch itself).
if __name__ == "__main__":
    demo.launch()