{
  "preset": "qwen3-16layer-c10x",
  "family": "qwen3",
  "source": "inline-preset",
  "output_dir": "D:\\Qwen3-80m-tinystories-A\\workspace\\outputs\\qwen3-16layer-c10x",
  "parameters": 10162752,
  "vocab_size": 2048,
  "hidden_size": 64,
  "num_hidden_layers": 24,
  "num_attention_heads": 4,
  "num_key_value_heads": 2,
  "rope_theta": 10000.0
}