AB-GPT-Base / config.json
{
  "architectures": [
    "CustomGPTForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_custom_gpt.CustomGPTConfig",
    "AutoModelForCausalLM": "modeling_custom_gpt.CustomGPTForCausalLM"
  },
  "block_size": 1024,
  "dtype": "float32",
  "hidden_size": 768,
  "intermediate_size": 2048,
  "max_position_embeddings": 1024,
  "model_type": "custom-gpt",
  "n_embd": 768,
  "n_head": 12,
  "n_layer": 12,
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "transformers_version": "4.57.3",
  "vocab_size": 50257
}
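
Because auto_map points at custom configuration and modeling files shipped in the repository (configuration_custom_gpt.py and modeling_custom_gpt.py), loading this model requires trust_remote_code=True. A minimal loading sketch, assuming the hub repo id is AjithBharadwaj/AB-GPT-Base (inferred from the page header, not confirmed by the config itself):

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id, inferred from the page header above.
repo_id = "AjithBharadwaj/AB-GPT-Base"

# auto_map routes the Auto classes to the repo's custom Python files,
# so that code must be explicitly trusted before it will execute.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# The config carries GPT-2-style names (n_embd, n_head, n_layer)
# alongside the standard Hugging Face names (hidden_size,
# num_attention_heads, num_hidden_layers); both report 768/12/12.
print(config.hidden_size, config.num_hidden_layers, config.num_attention_heads)

The vocab_size of 50257 matches the GPT-2 BPE vocabulary, which suggests the model pairs with a GPT-2-style tokenizer, though the config alone does not guarantee that.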