{
  "model_type": "multiscale_transformer",
  "architectures": [
    "MultiScaleForCausalLM"
  ],
  "vocab_size": 258,
  "d_model": 224,
  "n_heads": 4,
  "d_ff": 608,
  "n_layers_per_scale": 3,
  "n_cross_attn_layers": 1,
  "max_seq_len": 512,
  "dropout": 0.0,
  "bias": false,
  "rope_theta": 10000.0,
  "downsample_factors": [
    1,
    2,
    4
  ],
  "num_parameters": 9259040,
  "training_results": {
    "model": "AXL-Docs-8M",
    "params": 9870112,
    "steps": 271,
    "time": 120.31880593299866,
    "final_loss": 0.01961960271000862,
    "perplexity": 1.03,
    "max_seq_len": 512,
    "context_window": "512 bytes"
  }
}