# roberta-base-emopillars-contextless / configuration_emoaxis.py
# Uploaded by arnab04 using huggingface_hub (commit cc0c5cd, verified).
from transformers import RobertaConfig
class EmoAxisConfig(RobertaConfig):
    """Configuration for the EmoAxis emotion classifier built on RoBERTa.

    Extends ``RobertaConfig`` with two extra fields:

    Args:
        num_classes: Number of output emotion classes (default 28).
        freeze_upto: Layer index below which encoder layers are frozen
            (default 0). NOTE(review): the exact freezing semantics live in
            the model code, which is not visible here — confirm there.
        **kwargs: Forwarded to ``RobertaConfig``.
    """

    model_type = "emoaxis"

    def __init__(self, num_classes=28, freeze_upto=0, **kwargs):
        # Pin the RoBERTa position/token-type embedding sizes. Any
        # caller-supplied values for these two keys are overwritten.
        kwargs.update(max_position_embeddings=514, type_vocab_size=1)
        super().__init__(**kwargs)
        self.num_classes = num_classes
        self.freeze_upto = freeze_upto