HTill committed on
Commit
1b276b7
·
verified ·
1 Parent(s): ef039b3

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +26 -19
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "activation_dropout": 0.0,
3
  "architectures": [
4
  "EATModel"
5
  ],
@@ -7,26 +7,33 @@
7
  "AutoModel": "modeling_eat.EATModel",
8
  "AutoConfig": "configuration_eat.EATConfig"
9
  },
10
- "attn_drop_rate": 0.0,
11
- "depth": 12,
12
- "drop_rate": 0.0,
13
- "embed_dim": 768,
14
- "end_drop_path_rate": 0.0,
15
- "fixed_positions": true,
16
- "in_chans": 1,
17
- "layer_norm_first": false,
18
- "mlp_ratio": 4.0,
19
  "model_type": "eat",
20
  "model_variant": "pretrain",
21
- "norm_affine": true,
22
- "norm_eps": 1e-06,
23
- "num_classes": 527,
24
- "num_heads": 12,
25
- "patch_size": 16,
26
- "post_mlp_drop": 0.0,
27
- "qkv_bias": true,
28
- "start_drop_path_rate": 0.0,
29
- "stride": 16,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
30
  "torch_dtype": "float32",
31
  "transformers_version": "4.51.3"
32
  }
 
1
  {
2
+
3
  "architectures": [
4
  "EATModel"
5
  ],
 
7
  "AutoModel": "modeling_eat.EATModel",
8
  "AutoConfig": "configuration_eat.EATConfig"
9
  },
 
 
 
 
 
 
 
 
 
10
  "model_type": "eat",
11
  "model_variant": "pretrain",
12
+
13
+ "_comment_structural": "--- STRUCTURAL (DO NOT TOUCH) ---",
14
+ "embed_dim": 768,
15
+ "depth": 12,
16
+ "num_heads": 12,
17
+ "patch_size": 16,
18
+ "stride": 16,
19
+ "in_chans": 1,
20
+ "num_classes": 527,
21
+ "mlp_ratio": 4.0,
22
+ "qkv_bias": true,
23
+ "layer_norm_first": false,
24
+ "norm_affine": true,
25
+ "norm_eps": 1e-06,
26
+ "fixed_positions": true,
27
+
28
+ "_comment_finetuning": "--- FINE-TUNING KNOBS ---",
29
+ "drop_rate": 0.0,
30
+ "attn_drop_rate": 0.0,
31
+ "post_mlp_drop": 0.0,
32
+ "start_drop_path_rate": 0.0,
33
+ "end_drop_path_rate": 0.0,
34
+ "activation_dropout": 0.0,
35
+
36
+
37
  "torch_dtype": "float32",
38
  "transformers_version": "4.51.3"
39
  }