{
  "architectures": [
    "MindiForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_mindi.MindiConfig",
    "AutoModelForCausalLM": "modeling_mindi.MindiForCausalLM",
    "AutoTokenizer": [
      null,
      "tokenization_mindi.MindiTokenizer"
    ]
  },
  "bos_token_id": 2,
  "d_ff": 4608,
  "d_model": 1152,
  "dropout": 0.1,
  "dtype": "float16",
  "eos_token_id": 3,
  "init_std": 0.02,
  "max_seq_len": 2048,
  "model_type": "mindi",
  "n_heads": 16,
  "n_layers": 23,
  "pad_token_id": 0,
  "rms_norm_eps": 1e-05,
  "tie_embeddings": true,
  "transformers_version": "5.4.0",
  "vocab_size": 50000
}