roberta-base-semeval / configuration_emoaxis.py
arnab04's picture
Upload folder using huggingface_hub
b6e9211 verified
raw
history blame contribute delete
366 Bytes
from transformers import RobertaConfig
class EmoAxisConfig(RobertaConfig):
    """Configuration for the EmoAxis model, a RoBERTa-based emotion classifier.

    Extends :class:`~transformers.RobertaConfig` with two extra fields used by
    the EmoAxis modeling code.

    Args:
        num_classes (int, optional, defaults to 11):
            Number of output emotion classes for the classification head.
        freeze_upto (int, optional, defaults to 0):
            Encoder-freezing threshold consumed by the model code
            (presumably the number of lower encoder layers to freeze during
            fine-tuning; 0 means none — TODO confirm against the model class).
        **kwargs:
            Forwarded to :class:`~transformers.RobertaConfig`.
    """

    model_type = "emoaxis"

    def __init__(self, num_classes=11, freeze_upto=0, **kwargs):
        # RoBERTa's standard sizes. Use setdefault so an explicit
        # caller-supplied value (e.g. one loaded from a saved config.json)
        # is respected instead of being silently overwritten.
        kwargs.setdefault("max_position_embeddings", 514)
        kwargs.setdefault("type_vocab_size", 1)
        super().__init__(**kwargs)
        self.num_classes = num_classes
        self.freeze_upto = freeze_upto