import torch
import torch.nn as nn
from transformers import BertForTokenClassification
class SixOClassifier(nn.Module):
    """Two-layer MLP classification head: Linear -> ReLU -> Dropout -> Linear."""

    def __init__(self, clf_hidden_size, clf_num_labels):
        super().__init__()
        self.dense1 = nn.Linear(clf_hidden_size, clf_hidden_size // 2)
        self.activation1 = nn.ReLU()
        self.dropout1 = nn.Dropout(p=0.1)
        self.output_layer = nn.Linear(clf_hidden_size // 2, clf_num_labels)

    def forward(self, clf_input):
        x = self.dense1(clf_input)
        x = self.activation1(x)
        x = self.dropout1(x)
        x = self.output_layer(x)
        return x
class BertForTokenClassificationWithSixO(BertForTokenClassification):
    """BertForTokenClassification with its default single-layer classifier
    replaced by the deeper SixOClassifier head."""

    def __init__(self, config):
        super().__init__(config)
        self.num_labels = config.num_labels
        # Swap in the custom head; init_weights() re-initializes its layers.
        self.classifier = SixOClassifier(config.hidden_size, config.num_labels)
        self.init_weights()

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
        # Delegates to the parent implementation; kept as an explicit hook.
        model = super().from_pretrained(pretrained_model_name_or_path, *model_args, **kwargs)
        return model

    def freeze_bert(self):
        """Freezes the BERT layers to prevent their parameters from being updated during training."""
        for param in self.bert.parameters():
            param.requires_grad = False
        print("BERT layers frozen.")

    def unfreeze_bert(self):
        """Unfreezes the BERT layers to allow their parameters to be updated during training."""
        for param in self.bert.parameters():
            param.requires_grad = True
        print("BERT layers unfrozen.")