{
  "architectures": [
    "Base_Loop_EE_GPTForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "modeling_base_loop_ee.GPTConfig",
    "AutoModelForCausalLM": "modeling_base_loop_ee.Base_Loop_EE_GPTForCausalLM"
  },
  "dtype": "bfloat16",
  "model_type": "base_loop_ee",
  "transformers_version": "4.57.0"
}