from transformers import RobertaConfig
class EmoAxisConfig(RobertaConfig):
    """Configuration for the EmoAxis model, extending ``RobertaConfig``.

    Adds two head/training options on top of the standard RoBERTa
    configuration:

    Args:
        num_classes: Size of the classification head output
            (11 by default; presumably the number of emotion axes —
            confirm against the model head that consumes it).
        freeze_upto: Cutoff index for parameter freezing; NOTE(review):
            looks like layers below this index are frozen by the
            trainer — confirm against the caller.
        **kwargs: Forwarded to ``RobertaConfig``.
    """

    model_type = "emoaxis"

    def __init__(self, num_classes: int = 11, freeze_upto: int = 0, **kwargs):
        # Apply RoBERTa's conventional values (514 position embeddings,
        # a single token-type id) as defaults. Using setdefault instead
        # of unconditional assignment lets an explicit caller override
        # take effect; the original silently clobbered caller-supplied
        # values for these two keys.
        kwargs.setdefault("max_position_embeddings", 514)
        kwargs.setdefault("type_vocab_size", 1)
        super().__init__(**kwargs)
        self.num_classes = num_classes
        self.freeze_upto = freeze_upto