# BERT sequence classifier built on Hugging Face transformers.
from torch import nn
from easydict import EasyDict as MyEasyDict
from transformers import BertModel, PreTrainedModel, BertConfig, PretrainedConfig
class BertConfig(PretrainedConfig):
    """Configuration wrapper that carries an arbitrary ``model_config`` mapping.

    NOTE(review): this class shadows ``transformers.BertConfig`` imported
    above — confirm the shadowing is intentional before renaming anything.
    """

    # Architecture identifier used by the transformers auto-mapping machinery.
    model_type = "bert"

    def __init__(self, model_config=None, **kwargs):
        # Let PretrainedConfig consume every standard HF config kwarg first.
        super().__init__(**kwargs)
        # EasyDict gives attribute-style access; it tolerates None input.
        self.model_config = MyEasyDict(model_config)
class BERTClassifier(PreTrainedModel):
    """BERT encoder followed by dropout and a 16-way linear classification head.

    Wrapping in ``PreTrainedModel`` gives access to ``save_pretrained`` /
    ``from_pretrained`` serialization helpers.
    """

    config_class = BertConfig

    def __init__(self, config):
        """Build the encoder and classification head from *config*.

        Args:
            config: a BERT-style ``PretrainedConfig`` (must provide the
                hyperparameters ``BertModel`` expects, e.g. ``hidden_size``).
        """
        super().__init__(config)
        self.bert = BertModel(config)
        self.dropout = nn.Dropout(0.1)
        # NOTE(review): 16 output classes is hard-coded — consider reading it
        # from the config if the label space ever changes.
        self.fc = nn.Linear(self.bert.config.hidden_size, 16)

    def forward(self, input_ids, attention_mask=None):
        """Return classification logits of shape ``(batch, 16)``.

        Args:
            input_ids: token id tensor, ``(batch, seq_len)``.
            attention_mask: optional padding mask, ``(batch, seq_len)``.
                Generalized to default to ``None`` (BertModel then attends to
                every position), which is backward-compatible with callers
                that pass it explicitly.
        """
        outputs = self.bert(input_ids=input_ids, attention_mask=attention_mask)
        # pooler_output: [CLS] hidden state passed through BERT's pooler.
        pooled_output = outputs.pooler_output
        x = self.dropout(pooled_output)
        logits = self.fc(x)
        return logits

    def print_test(self, x):
        # Debug/smoke-test stub; *x* is ignored. Kept for interface
        # compatibility — safe to delete once no caller uses it.
        return "lmao"
if __name__ == "__main__":
    from transformers import AutoConfig

    # Initialize a google-bert/bert-base-uncased style configuration.
    # trust_remote_code is deliberately NOT set: bert-base-uncased is a
    # standard in-library architecture, so enabling remote code execution
    # was an unnecessary security risk. Unused imports (BertConfig,
    # BertModel, BertForMaskedLM) that shadowed module-level names were
    # dropped.
    config = AutoConfig.from_pretrained('google-bert/bert-base-uncased')
    model = BERTClassifier(config)