{
"architectures": [
"Base_Loop_GPTForCausalLM"
],
"auto_map": {
"AutoConfig": "modeling_base_loop.GPTConfig",
"AutoModelForCausalLM": "modeling_base_loop.Base_Loop_GPTForCausalLM"
},
"dtype": "bfloat16",
"model_type": "base_loop",
"transformers_version": "4.57.0"
}