AbstractPhil committed on
Commit
ee2e770
·
verified ·
1 Parent(s): 355de63

Update train_config.json - Run 20251012_161107

Browse files
weights/David-partial_shared-deep_efficiency/20251012_161107/train_config.json ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "name": "david_training",
  "run_id": "20251012_161107",
  "dataset_name": "AbstractPhil/imagenet-clip-features-orderly",
  "model_variant": "clip_vit_laion_bigg14",
  "num_classes": 1000,
  "preset": "clip_vit_bigg14",
  "custom_config_path": null,
  "num_classes_override": null,
  "use_belly_override": null,
  "belly_expand_override": null,
  "progressive_training_override": false,
  "scale_warmup_epochs_override": null,
  "num_epochs": 10,
  "batch_size": 1024,
  "learning_rate": 0.001,
  "weight_decay": 1e-05,
  "warmup_epochs": 3,
  "use_rose_loss": true,
  "rose_initial_weight": 0.1,
  "rose_max_weight": 0.5,
  "rose_weight_schedule": "adaptive",
  "use_cayley_loss": false,
  "cayley_weight": 0.001,
  "scale_loss_balance": null,
  "use_mixed_precision": false,
  "gradient_clip": 10.0,
  "scheduler_type": "cosine_restarts",
  "min_lr": 1e-06,
  "freeze_strategy": "none",
  "freeze_threshold": 90.0,
  "unfreeze_on_plateau": true,
  "patience": 10,
  "track_gradients": true,
  "gradient_scale_threshold": 1e-05,
  "gradient_scale_multiplier": 10.0,
  "log_interval": 50,
  "val_interval": 1,
  "save_interval": 5,
  "log_fusion_weights": true,
  "log_loss_components": true,
  "save_format": "safetensors",
  "hf_repo": "AbstractPhil/gated-david",
  "upload_to_hub": true,
  "base_dir": "./david_training",
  "num_workers": 10,
  "pin_memory": true,
  "prefetch_factor": 4,
  "persistent_workers": true
}