{
  "architectures": [
    "TinyLlama"
  ],
  "model_type": "brandon-tiny",
  "dim": 256,
  "n_layers": 24,
  "n_heads": 8,
  "n_kv_heads": 2,
  "vocab_size": 8192,
  "hidden_dim": 720,
  "max_seq_len": 512,
  "dropout": 0.05,
  "weight_tying": true,
  "norm_eps": 1e-05,
  "rope_theta": 10000.0,
  "block_sharing": true,
  "n_predict": 1,
  "dense_former": true,
  "value_residual": true,
  "n_registers": 4,
  "n_loops": 1,
  "ternary": false,
  "activation": "swiglu",
  "normalization": "rmsnorm",
  "position_encoding": "rope",
  "chat_format": "chatml",
  "total_parameters": 10706776
}