logreg and toxic bert
models/model2/model.py ADDED (+18 -0)
@@ -0,0 +1,18 @@
+class BERTClassifier(nn.Module):
+    def __init__(self):
+        super().__init__()
+        self.bert = AutoModelForSequenceClassification.from_pretrained('cointegrated/rubert-tiny-toxicity')
+        self.bert.classifier = nn.Linear(312, 312)
+        for param in self.bert.parameters():
+            param.requires_grad = False
+        self.linear = nn.Sequential(
+            nn.Linear(312, 128),
+            nn.Sigmoid(),
+            nn.Dropout(),
+            nn.Linear(128, 1)
+        )
+
+    def forward(self, x, attention_mask=None):
+        bert_out = self.bert(x, attention_mask=attention_mask).logits
+        out = self.linear(bert_out).squeeze(1)
+        return out
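A note on this file: as committed, model.py references nn and AutoModelForSequenceClassification without importing them, so the module will not load on its own. Also, because the replaced classifier head (the 312-to-312 Linear) is created before the freeze loop, it gets frozen at random initialization along with the rest of the backbone; only the self.linear Sequential head is trainable. Below is a minimal, hypothetical sketch of the imports the file needs plus an inference example. The tokenizer checkpoint and the example input are assumptions based on the model name used above, not part of this commit.

# Hypothetical sketch (not part of this commit): imports model.py needs,
# plus a usage example. Assumes the tokenizer matching the checkpoint above.
import torch
import torch.nn as nn
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# ... BERTClassifier as defined in models/model2/model.py above ...

tokenizer = AutoTokenizer.from_pretrained('cointegrated/rubert-tiny-toxicity')
model = BERTClassifier()
model.eval()

# rubert-tiny-toxicity is a Russian-language model, so the example input is Russian.
enc = tokenizer(['пример текста'], padding=True, truncation=True, return_tensors='pt')
with torch.no_grad():
    logit = model(enc['input_ids'], attention_mask=enc['attention_mask'])
prob = torch.sigmoid(logit)  # one score per input, since the head ends in Linear(128, 1)

Since forward returns a single squeezed logit per example, the head pairs naturally with a binary objective such as BCEWithLogitsLoss during training, with sigmoid applied only at inference time as shown.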