HTayy committed on
Commit
10e19ff
·
verified ·
1 Parent(s): 35903d6

Upload config.json with huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +43 -0
config.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "dataset": "flowers102",
  "data_dir": "./data",
  "batch_size": 16,
  "epochs": 300,
  "learning_rate": 0.005,
  "warmup_lr": 1e-05,
  "min_lr": 1e-05,
  "num_epochs_warmup": 25,
  "weight_decay": 0.05,
  "num_workers": 4,
  "model_type": "only-gla",
  "model_size": "tiny",
  "patch_size": 4,
  "stride": 4,
  "alpha": 0.5,
  "drop_rate": 0.5,
  "classification_mode": "avgpool",
  "patch_embed_version": "v2",
  "use_out_act": true,
  "use_out_gate": true,
  "if_abs_pos_embed": true,
  "if_cls_token": false,
  "use_middle_cls_token": false,
  "use_clstoken": false,
  "toeplitz_bandwidth": 32,
  "mlp_ratio": 4,
  "use_wavelet": false,
  "start_decay": 2250,
  "prune_range": 100,
  "prune_ratio": 0.4,
  "output_dir": "./output",
  "checkpoint_path": "./checkpoint.pth",
  "use_wandb": false,
  "is_GLA_first": false,
  "is_PE_onlyGLA": true,
  "is_PE_onlyFFT": false,
  "img_size": 224,
  "num_classes": 102,
  "depth": 12,
  "num_heads": 6,
  "embed_dim": 192
}