{
  "architectures": [
    "HFETM"
  ],
  "auto_map": {
    "AutoConfig": "modeling_etm.ETMConfig",
    "AutoModelForCausalLM": "modeling_etm.HFETM"
  },
  "block_size": 512,
  "dtype": "float32",
  "is_decoder": true,
  "model_type": "etm",
  "n_embd": 512,
  "n_head": 16,
  "n_layer": 4,
  "transformers_version": "4.57.1",
  "vocab_size": 30000
}