pratchya-3k / config.json (commit 3307e95, "Upload PratchyaForCausalLM")
{
  "architectures": [
    "PratchyaForCausalLM"
  ],
  "aux_loss_coeff": 0.01,
  "dtype": "float32",
  "embed_size": 512,
  "intermediate_size": 1408,
  "kv_lora_rank": 128,
  "mamba_d_conv": 4,
  "mamba_d_state": 64,
  "mamba_every_n_layers": 3,
  "mamba_expand": 2,
  "mamba_num_heads": 16,
  "max_seq_len": 512,
  "model_type": "pratchya",
  "num_attention_heads": 8,
  "num_experts": 8,
  "num_experts_per_tok": 2,
  "num_initial_ffn_layers": 4,
  "num_kv_heads": 2,
  "num_layers": 24,
  "num_pipes": 4,
  "q_lora_rank": 192,
  "qk_nope_head_dim": 64,
  "qk_rope_head_dim": 32,
  "rms_norm_eps": 1e-06,
  "rope_theta": 10000.0,
  "sinkhorn_iters": 5,
  "tie_word_embeddings": true,
  "transformers_version": "5.3.0",
  "use_cache": false,
  "v_head_dim": 64,
  "vocab_size": 132748
}
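
A minimal loading sketch, not part of the repository: it assumes the checkpoint ships custom modeling code for the "pratchya" architecture so that trust_remote_code=True can resolve PratchyaForCausalLM, and the repo id "Shinapri/pratchya-3k" is inferred from the uploader and file names above and may differ.

# Sketch: load the config above and build the model from it.
from transformers import AutoConfig, AutoModelForCausalLM

# Repo id is an assumption inferred from this page, not confirmed by it.
config = AutoConfig.from_pretrained("Shinapri/pratchya-3k", trust_remote_code=True)

# Sanity-check a few fields against the JSON above.
assert config.model_type == "pratchya"
assert config.num_layers == 24
assert config.num_experts_per_tok == 2

# Instantiate a (randomly initialized) model from the config alone;
# use from_pretrained instead to also load the uploaded weights.
model = AutoModelForCausalLM.from_config(config, trust_remote_code=True)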