# File size: 846 Bytes
# commit 880f5ea
import torch
import torch.nn as nn
from transformers import AutoTokenizer, AutoModelForSequenceClassification
class MyTinyBERT(nn.Module):
    """Regression head on top of a frozen rubert-tiny toxicity classifier.

    The pretrained 'cointegrated/rubert-tiny-toxicity' model is loaded and
    fully frozen; only the small MLP head stacked on its 5 classification
    logits is trainable.
    """

    def __init__(self):
        super().__init__()
        # Pretrained toxicity classifier; its classification head emits
        # 5 logits per sequence (matches the Linear(5, 256) below).
        self.bert = AutoModelForSequenceClassification.from_pretrained(
            'cointegrated/rubert-tiny-toxicity'
        )
        # Freeze the whole encoder so only the MLP head is updated.
        # NOTE(review): the frozen encoder still follows train()/eval()
        # mode, so any dropout inside it stays stochastic during training
        # -- confirm this is intended (self.bert.eval() would pin it).
        for param in self.bert.parameters():
            param.requires_grad = False
        # Trainable head: 5 toxicity logits -> 1 scalar output.
        self.linear = nn.Sequential(
            nn.Linear(5, 256),
            nn.Sigmoid(),
            nn.Dropout(),        # default p=0.5
            nn.Linear(256, 512),
            nn.Sigmoid(),
            nn.Dropout(p=0.4),
            nn.Linear(512, 1),
        )

    def forward(self, x):
        """Run the frozen encoder, then the trainable head.

        Args:
            x: pair-like of tensors; only x[0] (token ids) and x[1]
               (attention mask) are read -- assumed (batch, seq) shaped,
               TODO confirm against the caller.

        Returns:
            Tensor of shape (batch, 1): one scalar per input sequence.
        """
        # The encoder is fully frozen, so skip building an autograd graph
        # through it: saves memory and compute, results are unchanged.
        with torch.no_grad():
            bert_out = self.bert(x[0], attention_mask=x[1])
        # L2-normalize the 5 logits per sample (normalize defaults to
        # p=2, dim=1) before feeding the head.
        normed_logits = nn.functional.normalize(bert_out.logits)
        return self.linear(normed_logits)