config.json (572 Bytes):
{
  "architectures": [
    "GPJTGPT2ModelForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_gpjtgpt2.GPJTGPT2Config",
    "AutoModel": "modeling_gpjtgpt2.GPJTGPT2Model",
    "AutoModelForCausalLM": "modeling_gpjtgpt2.GPJTGPT2ModelForCausalLM"
  },
  "cfg": {
    "context_length": 1024,
    "drop_rate": 0.1,
    "emb_dim": 768,
    "n_heads": 12,
    "n_layers": 12,
    "qkv_bias": false,
    "vocab_size": 50257
  },
  "dtype": "float32",
  "model_type": "gpjtgpt2",
  "num_hidden_layers": 12,
  "transformers_version": "4.57.6",
  "use_cache": false
}
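The "auto_map" block is what lets the stock Auto* classes resolve to the custom classes shipped inside the repository (configuration_gpjtgpt2.py and modeling_gpjtgpt2.py), while the "cfg" block carries the raw GPT-2-small-sized hyperparameters (12 layers, 12 heads, 768-dim embeddings, 50257-token vocabulary) that the custom config presumably forwards to the model. A minimal loading sketch; the repo id below is a hypothetical placeholder, not one given in the config:

    from transformers import AutoConfig, AutoModelForCausalLM

    # Hypothetical repo id; substitute the actual Hub repository.
    repo_id = "your-username/gpjtgpt2"

    # trust_remote_code=True is required: the auto_map entries point at
    # Python files inside the repo, so transformers must execute that
    # custom code to construct the config and model classes.
    config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

    print(type(model).__name__)  # GPJTGPT2ModelForCausalLM

Without trust_remote_code=True, from_pretrained refuses to import the repo's modules and the "gpjtgpt2" model_type cannot be resolved, since it is not registered in the transformers library itself.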