{
"architectures": [
"BaseGPTForCausalLM"
],
"auto_map": {
"AutoConfig": "modeling_base.GPTConfig",
"AutoModelForCausalLM": "modeling_base.BaseGPTForCausalLM"
},
"dtype": "bfloat16",
"model_type": "base",
"transformers_version": "4.57.0"
}