{
  "architectures": [
    "LoopFormerGPTForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "modeling_loopformer.GPTConfig",
    "AutoModelForCausalLM": "modeling_loopformer.LoopFormerGPTForCausalLM"
  },
  "dtype": "bfloat16",
  "model_type": "loopformer",
  "transformers_version": "4.57.0"
}