{ "architectures": [ "EMGForCausalLM" ], "dropout": 0.01, "embedding_dim": 650, "hidden_dim": 650, "model_type": "emg", "num_layers": 1, "pad_token_id": 60004, "torch_dtype": "float32", "transformers_version": "4.52.3", "use_gradient_checkpointing": false, "use_layer_norm": true, "vocab_size": 60001, "auto_map": { "AutoConfig": "modeling_emg.EMGConfig", "AutoModel": "modeling_emg.EMGLanguageModel", "AutoModelForCausalLM": "modeling_emg.EMGForCausalLM", "AutoTokenizer": "modeling_emg.MorPieceTokenizer" } }