{
  "preset": "qwen3-16layer-c10x",
  "family": "qwen3",
  "source": "inline-preset",
  "output_dir": "D:\\Qwen3-80m-tinystories-A\\workspace\\outputs\\qwen3-16layer-c10x",
  "parameters": 32515584,
  "vocab_size": 2048,
  "hidden_size": 512,
  "num_hidden_layers": 8,
  "num_attention_heads": 8,
  "num_key_value_heads": 4,
  "rope_theta": 10000.0
}