seanhacks committed on
Commit
e87708f
·
verified ·
1 Parent(s): a75625c

model: graph_conv-mlp_h128_l3_edge_prediction | (graph_conv-mlp_h128_l3) | WandB: aqyutqg5

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -65,13 +65,13 @@
65
  "optimizer": "adamw",
66
  "scheduler": "onecycle",
67
  "gradient_clip_val": null,
68
- "epochs": 500,
69
  "batches_per_epoch": 20,
70
  "accelerator": "auto",
71
  "devices": 1,
72
  "precision": 32,
73
  "early_stopping": true,
74
- "early_stopping_patience": 50,
75
  "early_stopping_metric": "val_relation_weighted_auc",
76
  "save_checkpoints": true,
77
  "checkpoint_subdir": "checkpoints",
@@ -93,7 +93,7 @@
93
  "hidden_128",
94
  "layers_3",
95
  "lr_0.0005",
96
- "epochs_500"
97
  ],
98
  "log_model": false,
99
  "mode": "online",
 
65
  "optimizer": "adamw",
66
  "scheduler": "onecycle",
67
  "gradient_clip_val": null,
68
+ "epochs": 1000,
69
  "batches_per_epoch": 20,
70
  "accelerator": "auto",
71
  "devices": 1,
72
  "precision": 32,
73
  "early_stopping": true,
74
+ "early_stopping_patience": 150,
75
  "early_stopping_metric": "val_relation_weighted_auc",
76
  "save_checkpoints": true,
77
  "checkpoint_subdir": "checkpoints",
 
93
  "hidden_128",
94
  "layers_3",
95
  "lr_0.0005",
96
+ "epochs_1000"
97
  ],
98
  "log_model": false,
99
  "mode": "online",