{
  "act": "gelu",
  "attn_dropout": 0.4,
  "c_in": 1,
  "context_points": 528,
  "d_ff": 512,
  "d_layers": 3,
  "d_model": 256,
  "dropout": 0.0,
  "e_layers": 3,
  "head_dropout": 0,
  "head_type": "prediction",
  "initializer_range": 0.02,
  "mask_mode": "patch",
  "mask_nums": 3,
  "model_type": "LightGTS",
  "n_heads": 16,
  "num_patch": 11,
  "patch_len": 48,
  "shared_embedding": true,
  "stride": 48,
  "target_dim": 192,
  "transformers_version": "4.30.2"
}