florinbarbisch committed on
Commit
71483fd
·
verified ·
1 Parent(s): ca4658d

Messis | W&B Run exp-8-drop-hierarchy-no_head-0 (https://wandb.ai/crop-classification/messis/runs/cq9c3jvw)

Browse files
Files changed (2) hide show
  1. config.json +65 -75
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,76 +1,66 @@
1
- {
2
- "hparams": {
3
- "accumulate_grad_batches": 4,
4
- "backbone_weights_path": "./prithvi/models/Prithvi_100M.pt",
5
- "bands": [
6
- 0,
7
- 1,
8
- 2,
9
- 3,
10
- 4,
11
- 5
12
- ],
13
- "batch_size": 4,
14
- "debug": false,
15
- "dropout_p": 0.1,
16
- "early_stopping_metric": "val_loss",
17
- "early_stopping_mode": "min",
18
- "early_stopping_patience": 10,
19
- "experiment_group": "exp-4-backbone",
20
- "experiment_name": "exp-4-backbone-frozen",
21
- "freeze_backbone": true,
22
- "heads_spec": {
23
- "tier1": {
24
- "is_metrics_tier": true,
25
- "loss_weight": 1,
26
- "num_classes_to_predict": 6,
27
- "target_idx": 0,
28
- "type": "HierarchicalFCNHead"
29
- },
30
- "tier2": {
31
- "is_metrics_tier": true,
32
- "loss_weight": 1,
33
- "num_classes_to_predict": 17,
34
- "target_idx": 1,
35
- "type": "HierarchicalFCNHead"
36
- },
37
- "tier3": {
38
- "is_last_tier": true,
39
- "is_metrics_tier": true,
40
- "loss_weight": 1,
41
- "num_classes_to_predict": 49,
42
- "target_idx": 2,
43
- "type": "HierarchicalFCNHead"
44
- },
45
- "tier3_refinement_head": {
46
- "is_final_head": true,
47
- "is_metrics_tier": false,
48
- "loss_weight": 1,
49
- "num_classes_to_predict": 49,
50
- "target_idx": 2,
51
- "type": "LabelRefinementHead"
52
- }
53
- },
54
- "img_size": 224,
55
- "lr": 0.0001,
56
- "max_epochs": 400,
57
- "name": "frozen",
58
- "num_frames": 3,
59
- "optimizer": "Adam",
60
- "optimizer_momentum": null,
61
- "optimizer_weight_decay": null,
62
- "patch_size": 16,
63
- "test_folds": [
64
- 5
65
- ],
66
- "train_folds": [
67
- 1,
68
- 2,
69
- 3,
70
- 4
71
- ],
72
- "val_folds": [
73
- 0
74
- ]
75
- }
76
  }
 
1
+ {
2
+ "hparams": {
3
+ "accumulate_grad_batches": 1,
4
+ "backbone_weights_path": "./prithvi/models/Prithvi_100M.pt",
5
+ "bands": [
6
+ 0,
7
+ 1,
8
+ 2,
9
+ 3,
10
+ 4,
11
+ 5
12
+ ],
13
+ "batch_size": 2,
14
+ "debug": false,
15
+ "dropout_p": 0.1,
16
+ "early_stopping_metric": "val_f1_tier3_majority",
17
+ "early_stopping_mode": "max",
18
+ "early_stopping_patience": 10,
19
+ "experiment_group": "exp-8-drop-hierarchy",
20
+ "experiment_name": "exp-8-drop-hierarchy-no_head",
21
+ "freeze_backbone": true,
22
+ "heads_spec": {
23
+ "tier3": {
24
+ "is_last_tier": true,
25
+ "is_metrics_tier": true,
26
+ "loss_weight": 0,
27
+ "num_classes_to_predict": 49,
28
+ "target_idx": 2,
29
+ "type": "HierarchicalFCNHead"
30
+ },
31
+ "tier3_refinement_head": {
32
+ "is_final_head": true,
33
+ "is_metrics_tier": false,
34
+ "loss_weight": 1,
35
+ "num_classes_to_predict": 49,
36
+ "target_idx": 2,
37
+ "type": "LabelRefinementHead"
38
+ }
39
+ },
40
+ "img_size": 224,
41
+ "lr": 0.001,
42
+ "max_epochs": 4,
43
+ "name": "no_head",
44
+ "num_frames": 3,
45
+ "optimizer": "Adam",
46
+ "optimizer_momentum": null,
47
+ "optimizer_weight_decay": null,
48
+ "patch_size": 16,
49
+ "subsets": {
50
+ "train": 8,
51
+ "val": 4
52
+ },
53
+ "test_folds": [
54
+ 5
55
+ ],
56
+ "train_folds": [
57
+ 1,
58
+ 2,
59
+ 3,
60
+ 4
61
+ ],
62
+ "val_folds": [
63
+ 0
64
+ ]
65
+ }
 
 
 
 
 
 
 
 
 
 
66
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:d25b9a2cb124571c34ef9bcf719ca9df5696438d3226fee62d9fdc9f7b42f95d
3
- size 713880940
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:efe514afc134acf66abdbcdbffb4c863d68de858405e7e7404844edfc6d71b23
3
+ size 709117048