{
"architectures": [
"LoopFormerGPTForCausalLM"
],
"auto_map": {
"AutoConfig": "modeling_loopformer.GPTConfig",
"AutoModelForCausalLM": "modeling_loopformer.LoopFormerGPTForCausalLM"
},
"dtype": "bfloat16",
"model_type": "loopformer",
"transformers_version": "4.57.0"
}