{
"architectures": [
"ZZJRabbit3ForCausalLM"
],
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "zzjrabbit3.ZZJRabbit3Config",
"AutoModelForCausalLM": "zzjrabbit3.ZZJRabbit3ForCausalLM"
},
"dtype": "bfloat16",
"eos_token_id": 0,
"hidden_size": 1024,
"model_type": "zzjrabbit3",
"num_attention_heads": 8,
"num_hidden_layers": 12,
"pad_token_id": 0,
"transformers_version": "5.3.0",
"vocab_size": 100000
}
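Because the config declares an "auto_map", Transformers resolves the custom classes from the sibling zzjrabbit3.py module at load time when trust_remote_code=True is passed. A minimal loading sketch, assuming the config and zzjrabbit3.py sit together in a local directory (the path "zzjrabbit3-model" is hypothetical, not from this repo):

    from transformers import AutoConfig, AutoModelForCausalLM

    # Hypothetical local path (or Hub repo id) containing this
    # config.json alongside zzjrabbit3.py.
    model_dir = "zzjrabbit3-model"

    # trust_remote_code=True lets Transformers import zzjrabbit3.py and
    # instantiate the classes named in auto_map instead of a built-in
    # architecture.
    config = AutoConfig.from_pretrained(model_dir, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(model_dir, trust_remote_code=True)

    print(config.model_type)     # "zzjrabbit3"
    print(type(model).__name__)  # "ZZJRabbit3ForCausalLM"

Without trust_remote_code=True the load fails, since "zzjrabbit3" is not a model_type registered in the Transformers library itself.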