{
  "model_type": "mini-enedina",
  "architectures": ["MiniEnedina"],
  "dim": 512,
  "n_layers": 7,
  "n_heads": 8,
  "head_dim": 64,
  "intermediate_size": 2048,
  "vocab_size": 8012,
  "max_seq_len": 14336,
  "norm_eps": 1e-5,
  "rope_theta": 10000.0,
  "normalization": "rmsnorm",
  "activation": "silu_swiglu",
  "positional_encoding": "rope",
  "weight_tying": true,
  "total_parameters": 37570000,
  "framework": "mlx",
  "torch_dtype": "bfloat16"
}