{
"backbone_pretrained": false,
"moe_config": {
"alpha": 1.0,
"apply_to_patches_only": true,
"blocks": "last6",
"bottleneck": 192,
"experts": 8,
"freeze_backbone": true,
"ortho_lambda": 0.0005,
"r": 128,
"router_mode": "top1",
"tau": 1.0,
"unfreeze_layernorm": false
},
"num_classes": 1000,
"vit_model_name": "vit_base_patch16_224"
}