{
"act": "gelu",
"attn_dropout": 0.4,
"c_in": 1,
"context_points": 528,
"d_ff": 512,
"d_layers": 3,
"d_model": 256,
"dropout": 0.0,
"e_layers": 3,
"head_dropout": 0,
"head_type": "prediction",
"initializer_range": 0.02,
"mask_mode": "patch",
"mask_nums": 3,
"model_type": "LightGTS",
"n_heads": 16,
"num_patch": 11,
"patch_len": 48,
"shared_embedding": true,
"stride": 48,
"target_dim": 192,
"transformers_version": "4.30.2"
}