{
"model_name": "David-partial_shared-hierarchical_tree",
"run_id": "20251012_210041",
"timestamp": "2025-10-12T21:18:40.587482",
"best_val_acc": 72.78933333333333,
"best_epoch": 3,
"final_train_acc": 75.14292307976504,
"final_train_loss": 1.963918379697002,
"scale_accuracies": {
"256": 70.08533333333334,
"512": 72.59866666666667
},
"architecture": {
"preset": "balanced",
"sharing_mode": "partial_shared",
"fusion_mode": "hierarchical_tree",
"scales": [
256,
512,
768,
1024
],
"feature_dim": 512,
"num_classes": 1000,
"use_belly": true,
"belly_expand": 2.0
},
"training": {
"dataset": "AbstractPhil/imagenet-clip-features-orderly",
"model_variant": [
"clip_vit_b16",
"clip_vit_laion_b32",
"clip_vit_b32"
],
"num_epochs": 10,
"batch_size": 1024,
"learning_rate": 0.01,
"rose_weight": "0.2\u21920.8",
"cayley_loss": false,
"optimizer": "AdamW",
"scheduler": "cosine_restarts"
},
"files": {
"weights_safetensors": "weights/David-partial_shared-hierarchical_tree/20251012_210041/best_model_acc72.79.safetensors",
"weights_pytorch": "weights/David-partial_shared-hierarchical_tree/20251012_210041/best_model.pth",
"config": "weights/David-partial_shared-hierarchical_tree/20251012_210041/david_config.json",
"training_config": "weights/David-partial_shared-hierarchical_tree/20251012_210041/train_config.json",
"tensorboard": "runs/David-partial_shared-hierarchical_tree/20251012_210041/"
}
}