{
"step": 99999,
"metrics": {
"train/loss": 0.32741448283195496,
"train/step": 99999
},
"config": {
"model": {
"sig_dim": "8",
"sig_level": "3",
"patch_len": "16",
"num_patch": "128",
"num_layers": "6",
"num_heads": "8",
"mlp_scale": "4",
"use_bias": "True",
"input_dim": "1",
"is_cross_attn": "False",
"encoder_input_dim": "2",
"encoder_patch_len": "16"
},
"training": {
"batch_size": "128",
"lr": "5e-05",
"seed": "0",
"num_iters": "100000",
"lr_schedule": "cosine",
"lr_warmup_steps": "1000",
"lr_alpha": "0.04",
"debug": "False",
"adam_beta1": "0.9",
"adam_beta2": "0.95",
"adam_epsilon": "1e-08",
"weight_decay": "0.0",
"weight_decay_modules": ".*attn.*weight|.*mlp.*weight|.*sig.*weight",
"max_grad_norm": "1.0",
"use_wandb": "True",
"wandb_project": "sigpt",
"wandb_name": "small",
"wandb_log_freq": "1",
"wandb_save_code": "True",
"checkpoint_dir": "./checkpoints",
"checkpoint_freq": "1000",
"checkpoint_keep_n": "1"
},
"data": {
"name": "UTSD",
"root_path": "data/Nature"
}
}
}