import torch
import torch.nn as nn
from transformers import AutoModel  # NOTE(review): unused here — presumably used by callers; kept


class LabelEmbCls(nn.Module):
    """Label-embedding classifier head over a BERT-style encoder.

    Scores each input against a fixed matrix of label embeddings by taking
    the dot product between the encoder's [CLS] vector and every label
    vector, scaled by a learnable temperature ``tau``.
    """

    def __init__(self, base: nn.Module, lbl_emb: torch.Tensor) -> None:
        """
        Args:
            base: A transformer encoder whose output exposes
                ``.last_hidden_state`` (e.g. a HuggingFace ``AutoModel``).
            lbl_emb: Label-embedding matrix. Assumed shape
                ``(num_labels, hidden_size)`` so that its transpose matches
                the [CLS] vector — TODO confirm against the caller.
        """
        super().__init__()
        self.bert = base
        # Frozen label embeddings: stored as a Parameter with
        # requires_grad=False so they appear in state_dict but get no
        # gradient. NOTE(review): a registered buffer would also work;
        # kept as Parameter to preserve state_dict/optimizer behavior.
        self.lbl_E = nn.Parameter(lbl_emb, requires_grad=False)
        # Learnable temperature, initialized to 1.0.
        # NOTE(review): unconstrained — if training drives tau toward 0 the
        # division below blows up, and a negative tau flips logit signs.
        # Consider clamping (e.g. tau.clamp_min(eps)) or parameterizing as
        # exp(log_tau) — confirm with the training recipe before changing.
        self.tau = nn.Parameter(torch.tensor(1.0))

    def forward(
        self,
        input_ids: torch.Tensor,
        attention_mask: torch.Tensor,
        token_type_ids: torch.Tensor | None = None,
    ) -> torch.Tensor:
        """Return temperature-scaled label logits.

        Args:
            input_ids: Token ids, shape ``(batch, seq_len)``.
            attention_mask: Attention mask, shape ``(batch, seq_len)``.
            token_type_ids: Optional segment ids; forwarded as-is (``None``
                is accepted by BERT-style models).

        Returns:
            Logits of shape ``(batch, num_labels)``: the [CLS] vector of the
            last hidden state dotted with each label embedding, divided by
            ``tau``.
        """
        # Take the hidden state of the first token ([CLS]) as the sequence
        # representation.
        cls = self.bert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            token_type_ids=token_type_ids,
        ).last_hidden_state[:, 0]
        return torch.matmul(cls, self.lbl_E.T) / self.tau