{ "_attn_implementation_autoset": true, "_name_or_path": "hf-internal-testing/tiny-random-MptForCausalLM", "architectures": [ "MptForCausalLM" ], "attention_dropout": 0.1, "attn_config": { "model_type": "" }, "bos_token_id": 0, "d_model": 32, "dtype": "float32", "emb_pdrop": 0.0, "embedding_fraction": 1.0, "eos_token_id": 0, "expansion_ratio": 4, "gradient_checkpointing": false, "hidden_dropout": 0.1, "id2label": { "0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2" }, "init_device": "cpu", "initializer_range": 0.02, "is_decoder": true, "label2id": { "LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2 }, "layer_norm_epsilon": 1e-05, "learned_pos_emb": true, "logit_scale": null, "max_seq_len": 2048, "model_type": "mpt", "n_heads": 4, "n_layers": 5, "n_positions": 512, "no_bias": true, "norm_type": "low_precision_layernorm", "pad_token_id": 1023, "resid_pdrop": 0.0, "seq_length": 7, "transformers_version": "4.48.2", "type_vocab_size": 16, "use_cache": true, "verbose": 0, "vocab_size": 1024 }