Create models/roberta_model.py
models/roberta_model.py  ADDED  (+20 -0)
@@ -0,0 +1,20 @@
+import torch
+import torch.nn as nn
+from transformers import RobertaModel
+from config import DROPOUT_RATE, ROBERTA_MODEL_NAME
+
+class RobertaMultiOutputModel(nn.Module):
+    tokenizer_name = ROBERTA_MODEL_NAME
+
+    def __init__(self, num_labels):
+        super(RobertaMultiOutputModel, self).__init__()
+        self.roberta = RobertaModel.from_pretrained(ROBERTA_MODEL_NAME)
+        self.dropout = nn.Dropout(DROPOUT_RATE)
+        self.classifiers = nn.ModuleList([
+            nn.Linear(self.roberta.config.hidden_size, n_classes) for n_classes in num_labels
+        ])
+
+    def forward(self, input_ids, attention_mask):
+        pooled_output = self.roberta(input_ids=input_ids, attention_mask=attention_mask).pooler_output
+        pooled_output = self.dropout(pooled_output)
+        return [classifier(pooled_output) for classifier in self.classifiers]
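For context, a minimal usage sketch (not part of the commit): it assumes the config module defines ROBERTA_MODEL_NAME (a valid checkpoint name such as "roberta-base") and DROPOUT_RATE, and that num_labels is a list with one class count per output head. The example batch and head sizes below are hypothetical. Each head shares the pooled RoBERTa representation and returns its own logits tensor.

import torch
from transformers import RobertaTokenizer

from models.roberta_model import RobertaMultiOutputModel

# Hypothetical multi-output task: three heads with 4, 2, and 5 classes.
model = RobertaMultiOutputModel(num_labels=[4, 2, 5])
model.eval()

# The class attribute tokenizer_name points at the same checkpoint as the encoder.
tokenizer = RobertaTokenizer.from_pretrained(RobertaMultiOutputModel.tokenizer_name)
batch = tokenizer(["an example sentence"], return_tensors="pt",
                  padding=True, truncation=True)

with torch.no_grad():
    logits = model(batch["input_ids"], batch["attention_mask"])

# One logits tensor per head: shapes (1, 4), (1, 2), (1, 5).
for i, out in enumerate(logits):
    print(f"head {i}: {tuple(out.shape)}")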