{
"model_type": "ChemQ3MTPForCausalLM",
"num_future_tokens": 3,
"horizon_loss_enabled": true,
"mtp_head_enabled": true,
"training_phases": [
"mtp_horizon_training"
],
"total_parameters": 9857155,
"epochs_trained": 1,
"total_steps": 145628
}
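
As a quick sanity check, here is a minimal Python sketch that loads this metadata and prints a training summary. The path `config.json` and the helper `load_mtp_metadata` are illustrative assumptions; only the field names come from the JSON above.

```python
import json
from pathlib import Path

# Hypothetical path; the actual file name in the repo may differ.
CONFIG_PATH = Path("config.json")

def load_mtp_metadata(path: Path) -> dict:
    """Load the MTP training metadata and verify the fields shown above."""
    with path.open() as f:
        meta = json.load(f)
    # Field names taken from the JSON above; interpretations are assumptions.
    assert meta["model_type"] == "ChemQ3MTPForCausalLM"
    assert meta["num_future_tokens"] >= 1  # future tokens the MTP head predicts
    return meta

if __name__ == "__main__":
    meta = load_mtp_metadata(CONFIG_PATH)
    print(f"{meta['model_type']}: {meta['total_parameters']:,} parameters, "
          f"{meta['epochs_trained']} epoch(s), {meta['total_steps']:,} steps")
```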