{
"architectures": [
"ZZJRabbit3ForCausalLM"
],
"attention_dropout": 0.0,
"dtype": "float32",
"eos_token_id": 0,
"hidden_size": 1024,
"model_type": "zzjrabbit3",
"num_attention_heads": 8,
"num_hidden_layers": 12,
"pad_token_id": 0,
"transformers_version": "5.3.0",
"use_cache": false,
"vocab_size": 100000
}
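
For reference, a minimal sketch of how this file could be inspected with the Hugging Face transformers library, assuming it is saved locally as config.json (the filename and path are assumptions). PretrainedConfig.from_json_file reads the JSON into a generic config object, so it works even though the custom "zzjrabbit3" model_type is not a built-in architecture; instantiating the model itself would additionally require the repo's own modeling code.

    from transformers import PretrainedConfig

    # Generic load: does not require "zzjrabbit3" to be a registered model type.
    config = PretrainedConfig.from_json_file("config.json")  # assumed local path

    print(config.hidden_size)          # 1024
    print(config.num_attention_heads)  # 8  -> 1024 / 8 = 128 dims per head
    print(config.vocab_size)           # 100000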