{
"architectures": [
"Base_Loop_EE_GPTForCausalLM"
],
"auto_map": {
"AutoConfig": "modeling_base_loop_ee.GPTConfig",
"AutoModelForCausalLM": "modeling_base_loop_ee.Base_Loop_EE_GPTForCausalLM"
},
"dtype": "bfloat16",
"model_type": "base_loop_ee",
"transformers_version": "4.57.0"
}