{
"step": 99999,
"metrics": {
"train/loss": 0.32741448283195496,
"train/step": 99999
},
"config": {
"model": {
"sig_dim": "8",
"sig_level": "3",
"patch_len": "16",
"num_patch": "128",
"num_layers": "6",
"num_heads": "8",
"mlp_scale": "4",
"use_bias": "True",
"input_dim": "1",
"is_cross_attn": "False",
"encoder_input_dim": "2",
"encoder_patch_len": "16"
},
"training": {
"batch_size": "128",
"lr": "5e-05",
"seed": "0",
"num_iters": "100000",
"lr_schedule": "cosine",
"lr_warmup_steps": "1000",
"lr_alpha": "0.04",
"debug": "False",
"adam_beta1": "0.9",
"adam_beta2": "0.95",
"adam_epsilon": "1e-08",
"weight_decay": "0.0",
"weight_decay_modules": ".*attn.*weight|.*mlp.*weight|.*sig.*weight|",
"max_grad_norm": "1.0",
"use_wandb": "True",
"wandb_project": "sigpt",
"wandb_name": "small",
"wandb_log_freq": "1",
"wandb_save_code": "True",
"checkpoint_dir": "./checkpoints",
"checkpoint_freq": "1000",
"checkpoint_keep_n": "1"
},
"data": {
"name": "UTSD",
"root_path": "data/Nature"
}
}
}