{
"model_type": "challenger",
"vocab_size": 100288,
"n_layer": 16,
"n_head": 16,
"n_embd": 1536,
"architectures": ["ChallengerForCausalLM"],
"auto_map": {
"AutoConfig": "configuration_challenger.ChallengerConfig",
"AutoModelForCausalLM": "modeling_challenger.ChallengerForCausalLM"
}
}