sarcastic-model / config.json
{
  "architectures": ["MyGPTForCausalLM"],
  "model_type": "my_gpt",
  "vocab_size": 50257,
  "hidden_size": 256,
  "num_attention_heads": 4,
  "num_hidden_layers": 12,
  "max_position_embeddings": 256,
  "drop_rate": 0.1,
  "qkv_bias": false,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "pad_token_id": 50256,
  "auto_map": {
    "AutoConfig": "configuration_my_gpt.MyGPTConfig",
    "AutoModelForCausalLM": "modeling_my_gpt.MyGPTForCausalLM"
  }
}
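
The auto_map block tells Transformers where to find the custom classes inside the repo: AutoConfig resolves to MyGPTConfig in configuration_my_gpt.py, and AutoModelForCausalLM resolves to MyGPTForCausalLM in modeling_my_gpt.py. As a rough illustration, a config class compatible with this JSON could look like the sketch below. This is a minimal sketch, assuming the usual PretrainedConfig subclassing pattern; the constructor defaults and body are assumptions mirrored from the JSON fields above, not the repository's actual code.

# configuration_my_gpt.py -- hypothetical sketch; the repo's real file may differ.
from transformers import PretrainedConfig

class MyGPTConfig(PretrainedConfig):
    # Must match the "model_type" field in config.json.
    model_type = "my_gpt"

    def __init__(
        self,
        vocab_size=50257,
        hidden_size=256,
        num_attention_heads=4,
        num_hidden_layers=12,
        max_position_embeddings=256,
        drop_rate=0.1,
        qkv_bias=False,
        bos_token_id=50256,
        eos_token_id=50256,
        pad_token_id=50256,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.max_position_embeddings = max_position_embeddings
        self.drop_rate = drop_rate
        self.qkv_bias = qkv_bias
        # PretrainedConfig stores the special token ids passed via kwargs.
        super().__init__(
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            pad_token_id=pad_token_id,
            **kwargs,
        )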
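
Because the custom classes live in the repo rather than in the Transformers library, loading requires trust_remote_code=True; the stock Auto classes then follow auto_map to the remote code. The repo id below is inferred from the page header and is an assumption.

from transformers import AutoConfig, AutoModelForCausalLM

# "dev-das/sarcastic-model" is inferred from the page header;
# adjust if the actual repo id differs.
repo_id = "dev-das/sarcastic-model"

# trust_remote_code=True lets Transformers download and run the
# configuration_my_gpt.py / modeling_my_gpt.py files named in auto_map.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)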