# tamil-tiny-stories / configuration_tamil_tiny_stories.py
# Author: senthil090
# Initial model, 10000 epochs (commit b99ff82, verified)
from transformers import PretrainedConfig
class TamilTinyStoriesConfig(PretrainedConfig):
    """Configuration for the TamilTinyStories decoder-only language model.

    Stores the GPT-style hyperparameters (`n_embd`, `n_head`, `n_layer`,
    `block_size`, `dropout`) and mirrors them onto the standard
    `transformers` attribute names (`hidden_size`, `num_attention_heads`,
    `num_hidden_layers`, `max_position_embeddings`) so generic utilities
    that inspect those names keep working.

    Args:
        vocab_size: Size of the (possibly padded) vocabulary. Defaults to 0
            and is expected to be set when the tokenizer is known.
        original_vocab_size: Unpadded vocabulary size; falls back to
            ``vocab_size`` when not given.
        block_size: Maximum sequence length / positional-embedding count.
        n_embd: Embedding / hidden dimension.
        n_head: Number of attention heads.
        n_layer: Number of transformer layers.
        dropout: Dropout probability.
        bos_token_id / eos_token_id / pad_token_id / unk_token_id: Special
            token ids forwarded to ``PretrainedConfig``.
        use_cache: Whether the model should return past key/values.
        **kwargs: Remaining options forwarded to ``PretrainedConfig``.
    """

    model_type = "tamil_tiny_stories"

    def __init__(
        self,
        vocab_size=0,
        original_vocab_size=None,
        block_size=128,
        n_embd=128,
        n_head=4,
        n_layer=4,
        dropout=0.0,
        bos_token_id=None,
        eos_token_id=None,
        pad_token_id=None,
        unk_token_id=None,
        use_cache=False,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        # Keep the unpadded vocab size around (e.g. when vocab_size was
        # rounded up for efficiency); default to the padded size.
        self.original_vocab_size = original_vocab_size if original_vocab_size is not None else vocab_size
        self.block_size = block_size
        self.n_embd = n_embd
        self.n_head = n_head
        self.n_layer = n_layer
        self.dropout = dropout
        # Aliases expected by generic `transformers` tooling.
        self.hidden_size = n_embd
        self.num_attention_heads = n_head
        self.num_hidden_layers = n_layer
        self.max_position_embeddings = block_size
        self.use_cache = use_cache
        # BUGFIX: `PretrainedConfig.__init__` does
        # `self.is_decoder = kwargs.pop("is_decoder", False)`, so assigning
        # `self.is_decoder = True` *before* calling super() was silently
        # reset to False. Route the value through kwargs instead; setdefault
        # still lets an explicit caller-supplied is_decoder win.
        kwargs.setdefault("is_decoder", True)
        super().__init__(
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            pad_token_id=pad_token_id,
            unk_token_id=unk_token_id,
            **kwargs,
        )
# Register this config with the Auto classes so that
# `AutoConfig.from_pretrained(..., trust_remote_code=True)` can map
# model_type "tamil_tiny_stories" to this class when the file ships
# inside a model repository.
TamilTinyStoriesConfig.register_for_auto_class()