frawdllm-100m / config.json
{
"architectures": [
"FrawdLLMForCausalLM"
],
"auto_map": {
"AutoConfig": "hf_wrapper.FrawdLLMConfig",
"AutoModelForCausalLM": "hf_wrapper.FrawdLLMForCausalLM"
},
"bos_token_id": 2,
"context_length": 1024,
"dropout": 0.1,
"dtype": "float32",
"eos_token_id": 3,
"hidden_size": 768,
"model_type": "frawdllm",
"n_embd": 768,
"n_head": 12,
"n_layer": 12,
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"transformers_version": "4.57.3",
"use_rmsnorm": false,
"use_rope": true,
"use_swiglu": false,
"vocab_size": 32000
}
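The auto_map block registers custom classes (FrawdLLMConfig and FrawdLLMForCausalLM from hf_wrapper.py in this repo), so the model cannot be loaded with the stock transformers architectures; from_pretrained must be allowed to execute the wrapper code via trust_remote_code=True. Below is a minimal loading sketch; the repo id "tsingla98/frawdllm-100m" is an assumption inferred from the page header, not something the config itself confirms.

```python
# Minimal loading sketch for a config that routes through auto_map custom code.
# Assumption: the hub repo id is "tsingla98/frawdllm-100m" (inferred from the
# page header above, not confirmed by the config).
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "tsingla98/frawdllm-100m"

# trust_remote_code=True is required because auto_map points AutoConfig and
# AutoModelForCausalLM at classes defined in the repo's hf_wrapper.py.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

print(config.model_type)       # "frawdllm"
print(model.num_parameters())  # roughly 100M, per the repo name
```

As a quick sanity check on the shapes: hidden_size (and its alias n_embd) of 768 divided across num_attention_heads = 12 gives a per-head dimension of 64, and the feature flags select rotary position embeddings (use_rope) while keeping standard LayerNorm (use_rmsnorm false) and a non-SwiGLU feed-forward (use_swiglu false).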