# configuration_shivikM3.py
# SHIVIK-M3 FP32 configuration (2.43B params, 28 layers, 200K vocab).
from transformers import PretrainedConfig


class ShivikM3Config(PretrainedConfig):
    """Configuration class for the SHIVIK-M3 model.

    The attention fields describe a grouped-query attention scheme whose
    KV head count changes at ``kv_head_split_layer`` (``num_kv_heads`` on
    one side of the split, ``num_kv_heads_high`` on the other, per the
    field names).
    """

    model_type = "shivik-m3"

    def __init__(
        self,
        vocab_size=200018,          # size of the token vocabulary (~200K)
        hidden_size=2048,           # transformer hidden dimension
        num_hidden_layers=28,       # number of transformer blocks
        num_attention_heads=32,     # query attention heads per layer
        intermediate_size=7168,     # feed-forward inner dimension
        kv_head_split_layer=14,     # layer index where the KV head count changes
        num_kv_heads=8,             # KV heads for the low-index layers (per the naming)
        num_kv_heads_high=32,       # KV heads for the remaining ("high") layers
        tie_word_embeddings=True,   # share input and output embedding weights
        rms_norm_eps=1e-5,          # epsilon for RMSNorm layers
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_hidden_layers = num_hidden_layers
        self.num_attention_heads = num_attention_heads
        self.intermediate_size = intermediate_size
        self.kv_head_split_layer = kv_head_split_layer
        self.num_kv_heads = num_kv_heads
        self.num_kv_heads_high = num_kv_heads_high
        self.rms_norm_eps = rms_norm_eps
        # PretrainedConfig consumes tie_word_embeddings itself, so pass it
        # through rather than overwriting the attribute after the base init.
        super().__init__(tie_word_embeddings=tie_word_embeddings, **kwargs)
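

# ---------------------------------------------------------------------------
# Illustrative usage (not part of the uploaded file): a minimal sketch of how
# this config might be registered and how the depth-split KV-head fields could
# be consumed. Assumptions flagged here: the helper name
# `num_kv_heads_for_layer` is hypothetical, and the split semantics (layers
# below `kv_head_split_layer` use `num_kv_heads`, deeper layers use
# `num_kv_heads_high`) are inferred from the field names, not confirmed by the
# checkpoint's modeling code.
# ---------------------------------------------------------------------------
def num_kv_heads_for_layer(config: ShivikM3Config, layer_idx: int) -> int:
    """Hypothetical helper: KV head count for a given layer under the
    inferred depth-split grouped-query attention scheme."""
    if layer_idx < config.kv_head_split_layer:
        return config.num_kv_heads
    return config.num_kv_heads_high


if __name__ == "__main__":
    from transformers import AutoConfig

    # Register the custom config so AutoConfig can resolve model_type
    # "shivik-m3" (standard transformers custom-config registration).
    AutoConfig.register("shivik-m3", ShivikM3Config)

    config = ShivikM3Config()
    print(config.model_type, config.vocab_size, config.num_hidden_layers)

    # Per-layer KV head schedule under the assumption above:
    # 8 heads for layers 0-13, 32 heads for layers 14-27.
    print([num_kv_heads_for_layer(config, i) for i in range(config.num_hidden_layers)])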