"""A small sentiment classification head over precomputed OpenAI
`text-embedding-3-small` embeddings, packaged as a Hugging Face
`PreTrainedModel` so it can be saved, loaded, and trained with the
standard `transformers` tooling."""

from typing import Optional

import torch
import torch.nn as nn
from transformers.configuration_utils import PretrainedConfig
from transformers.modeling_utils import PreTrainedModel


class TextEmbedding3SmallSentimentHeadConfig(PretrainedConfig):
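    """Configuration for a lightweight sentiment head that classifies
    precomputed `text-embedding-3-small` embeddings.

    Args:
        input_dim: Dimensionality of the input embeddings; 1536 matches the
            default output size of OpenAI's text-embedding-3-small model.
        hidden_dim: Width of the hidden MLP layer; a value of 0 or less
            selects a plain linear probe instead.
        dropout: Dropout probability applied after the hidden activation.
        num_labels: Number of sentiment classes.
    """
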
    model_type = "sentiment-head"

    def __init__(
        self,
        input_dim: int = 1536,
        hidden_dim: int = 512,
        dropout: float = 0.2,
        num_labels: int = 3,
        **kwargs,
    ) -> None:
        super().__init__(**kwargs)
        self.input_dim = int(input_dim)
        self.hidden_dim = int(hidden_dim)
        self.dropout = float(dropout)
        self.num_labels = int(num_labels)


class TextEmbedding3SmallSentimentHead(PreTrainedModel):
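    """Sentiment classification head exposed as a `PreTrainedModel`.

    Unlike a full transformer, this model consumes precomputed embeddings
    via `inputs_embeds` rather than token ids, so no tokenizer or base
    encoder is involved.
    """
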
    config_class = TextEmbedding3SmallSentimentHeadConfig

    def __init__(self, config: TextEmbedding3SmallSentimentHeadConfig) -> None:
        super().__init__(config)

        # A positive hidden_dim selects a two-layer MLP head; otherwise the
        # head falls back to a single linear probe over the embeddings.
        if config.hidden_dim > 0:
            self.net = nn.Sequential(
                nn.Linear(config.input_dim, config.hidden_dim),
                nn.ReLU(),
                nn.Dropout(p=config.dropout),
                nn.Linear(config.hidden_dim, config.num_labels),
            )
        else:
            self.net = nn.Linear(config.input_dim, config.num_labels)

        # Run transformers' standard post-initialization (weight-init hooks).
        self.post_init()

    def forward(
        self,
        inputs_embeds: torch.FloatTensor,
        labels: Optional[torch.LongTensor] = None,
        **kwargs,
    ):
        # inputs_embeds is expected to be a (batch_size, input_dim) tensor of
        # precomputed sentence embeddings.
        logits = self.net(inputs_embeds)
        loss = None
        if labels is not None:
            # Standard multi-class cross-entropy against integer class ids.
            loss = nn.CrossEntropyLoss()(logits, labels)
        return {"logits": logits, "loss": loss}
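

# A minimal usage sketch, assuming embeddings are computed elsewhere (for
# example via the OpenAI embeddings API for text-embedding-3-small); the
# random tensor below is only a stand-in for a real (batch, 1536) embedding
# matrix, and the label values are hypothetical.
if __name__ == "__main__":
    config = TextEmbedding3SmallSentimentHeadConfig()
    model = TextEmbedding3SmallSentimentHead(config)

    embeddings = torch.randn(4, config.input_dim)  # stand-in for real embeddings
    labels = torch.tensor([0, 1, 2, 1])  # hypothetical class ids
    outputs = model(inputs_embeds=embeddings, labels=labels)
    print(outputs["logits"].shape)  # torch.Size([4, 3])
    print(outputs["loss"])  # scalar cross-entropy loss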