{
  "num_opinion_anchors": 225,
  "pentachoron_dim": 512,
  "scales": [128, 256, 512],
  "vocab_size": 492,
  "max_seq_len": 77,
  "global_step": 1000,
  "best_val_loss": null,
  "optimizations": {
    "use_gradient_checkpointing": false,
    "share_scale_embeddings": false
  },
  "note": "share_scale_embeddings MUST be False to preserve multi-scale architecture",
  "note_best_val_loss": "null means no validation loss has been recorded yet (was serialized as non-standard Infinity)"
}