AbstractPhil committed on
Commit
8328d9a
·
verified ·
1 Parent(s): d8fdef8

Update metrics - Run 20251104_152832

Browse files
Files changed (1) hide show
  1. best_model.json +20 -22
best_model.json CHANGED
@@ -1,33 +1,31 @@
1
  {
2
- "model_name": "David-decoupled-deep_efficiency",
3
- "run_id": "20251104_151233",
4
- "timestamp": "2025-11-04T15:26:20.020158",
5
- "best_val_acc": 75.774,
6
- "best_epoch": 4,
7
- "final_train_acc": 81.50350422700554,
8
- "final_train_loss": 1.817704427256567,
9
  "scale_accuracies": {
10
- "256": 72.676,
11
- "512": 75.102,
12
- "768": 75.774,
13
- "1024": 76.074,
14
- "1280": 75.996
15
  },
16
  "architecture": {
17
- "preset": "high_accuracy",
18
  "sharing_mode": "decoupled",
19
- "fusion_mode": "deep_efficiency",
20
  "scales": [
21
  256,
22
  512,
23
  768,
24
- 1024,
25
- 1280
26
  ],
27
  "feature_dim": 512,
28
  "num_classes": 1000,
29
  "use_belly": true,
30
- "belly_expand": 2.5
31
  },
32
  "training": {
33
  "dataset": "AbstractPhil/imagenet-clip-features-orderly",
@@ -41,10 +39,10 @@
41
  "scheduler": "cosine_restarts"
42
  },
43
  "files": {
44
- "weights_safetensors": "weights/David-decoupled-deep_efficiency/20251104_151233/best_model_acc75.77.safetensors",
45
- "weights_pytorch": "weights/David-decoupled-deep_efficiency/20251104_151233/best_model.pth",
46
- "config": "weights/David-decoupled-deep_efficiency/20251104_151233/david_config.json",
47
- "training_config": "weights/David-decoupled-deep_efficiency/20251104_151233/train_config.json",
48
- "tensorboard": "runs/David-decoupled-deep_efficiency/20251104_151233/"
49
  }
50
  }
 
1
  {
2
+ "model_name": "David-decoupled-cantor_scale",
3
+ "run_id": "20251104_152832",
4
+ "timestamp": "2025-11-04T15:30:39.057494",
5
+ "best_val_acc": 73.93,
6
+ "best_epoch": 0,
7
+ "final_train_acc": 72.55915895429713,
8
+ "final_train_loss": 3.1812268528422023,
9
  "scale_accuracies": {
10
+ "256": 68.302,
11
+ "512": 72.274,
12
+ "768": 72.712,
13
+ "1024": 73.272
 
14
  },
15
  "architecture": {
16
+ "preset": "clip_vit_b16_cantor",
17
  "sharing_mode": "decoupled",
18
+ "fusion_mode": "cantor_scale",
19
  "scales": [
20
  256,
21
  512,
22
  768,
23
+ 1024
 
24
  ],
25
  "feature_dim": 512,
26
  "num_classes": 1000,
27
  "use_belly": true,
28
+ "belly_expand": 2.0
29
  },
30
  "training": {
31
  "dataset": "AbstractPhil/imagenet-clip-features-orderly",
 
39
  "scheduler": "cosine_restarts"
40
  },
41
  "files": {
42
+ "weights_safetensors": "weights/David-decoupled-cantor_scale/20251104_152832/best_model_acc73.93.safetensors",
43
+ "weights_pytorch": "weights/David-decoupled-cantor_scale/20251104_152832/best_model.pth",
44
+ "config": "weights/David-decoupled-cantor_scale/20251104_152832/david_config.json",
45
+ "training_config": "weights/David-decoupled-cantor_scale/20251104_152832/train_config.json",
46
+ "tensorboard": "runs/David-decoupled-cantor_scale/20251104_152832/"
47
  }
48
  }