{
"config": {
"num_nodes": 325,
"in_steps": 12,
"out_steps": 12,
"input_dim": 2,
"output_dim": 1,
"steps_per_day": 288,
"input_embedding_dim": 24,
"tod_embedding_dim": 24,
"dow_embedding_dim": 0,
"adaptive_embedding_dim": 80,
"num_heads": 4,
"num_layers": 3,
"dropout": 0.1,
"dropout_a": 0.3,
"kernel_size": [
1
],
"epochs": 100,
"batch_size": 64,
"learning_rate": 0.001,
"weight_decay": 0.0003,
"milestones": [
20,
30
],
"lr_decay_rate": 0.1,
"early_stop": 10,
"clip_grad": 0,
"device": "cuda",
"verbose": 1
},
"scaler_mean": 62.73567581176758,
"scaler_std": 9.4381742477417
}